Dec 03 05:41:15 crc systemd[1]: Starting Kubernetes Kubelet... Dec 03 05:41:15 crc restorecon[4710]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc 
restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 05:41:15 crc 
restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc 
restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc 
restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 
crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 
05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 
05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 05:41:15 crc 
restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc 
restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 05:41:15 crc restorecon[4710]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 03 05:41:16 crc kubenswrapper[4810]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 05:41:16 crc kubenswrapper[4810]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 03 05:41:16 crc kubenswrapper[4810]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 05:41:16 crc kubenswrapper[4810]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 03 05:41:16 crc kubenswrapper[4810]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 03 05:41:16 crc kubenswrapper[4810]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.189425 4810 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192621 4810 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192638 4810 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192643 4810 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192646 4810 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192653 4810 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192658 4810 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192663 4810 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192668 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192672 4810 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192677 4810 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192681 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192685 4810 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192689 4810 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192693 4810 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192701 4810 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192706 4810 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192710 4810 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192714 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192718 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192722 4810 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192741 4810 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192745 4810 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192749 4810 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192752 4810 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192756 4810 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192760 4810 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192764 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192769 4810 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192773 4810 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192779 4810 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192783 4810 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192787 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192791 4810 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192796 4810 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192800 4810 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192804 4810 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192808 4810 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192811 4810 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192815 4810 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192819 4810 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192824 4810 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192828 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192831 4810 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192835 4810 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192839 4810 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 05:41:16 crc kubenswrapper[4810]: 
W1203 05:41:16.192843 4810 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192847 4810 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192851 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192855 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192858 4810 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192863 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192869 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192873 4810 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192876 4810 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192880 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192883 4810 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192887 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192891 4810 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192894 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192898 4810 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192901 4810 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192906 4810 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192909 4810 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192914 4810 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
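The long runs of "unrecognized feature gate" warnings appear to come from OpenShift-specific gate names being handed to a kubelet that only knows the upstream Kubernetes gates: unknown names are logged and skipped, while known GA or deprecated gates are set together with a removal notice. Below is a minimal sketch of that pattern, assuming a simple map-based registry; the real implementation lives in k8s.io/component-base/featuregate, and the names and messages here are illustrative only.

```go
// Minimal sketch of a feature-gate registry that mirrors the log behavior:
// warn on unknown gate names, note locked (GA/deprecated) gates, set the rest.
package main

import "log"

type gateState struct {
	enabled bool
	locked  bool // GA or deprecated gates that will be removed in a future release
}

type featureGates struct {
	known map[string]gateState
}

// Set applies requested gate values, warning about names this registry does not know.
func (f *featureGates) Set(requested map[string]bool) {
	for name, val := range requested {
		st, ok := f.known[name]
		if !ok {
			log.Printf("W unrecognized feature gate: %s", name)
			continue
		}
		if st.locked {
			log.Printf("W Setting locked feature gate %s=%t. It will be removed in a future release.", name, val)
		}
		st.enabled = val
		f.known[name] = st
	}
}

func main() {
	fg := &featureGates{known: map[string]gateState{
		"CloudDualStackNodeIPs": {locked: true},
		"KMSv1":                 {locked: true},
	}}
	fg.Set(map[string]bool{
		"CloudDualStackNodeIPs": true,
		"KMSv1":                 true,
		"RouteAdvertisements":   true, // unknown to this sketch -> warning, like the OpenShift gates above
	})
	log.Printf("I feature gates: %v", fg.known)
}
```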
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192919 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192923 4810 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192927 4810 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192930 4810 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192934 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192938 4810 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.192941 4810 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193045 4810 flags.go:64] FLAG: --address="0.0.0.0" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193056 4810 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193066 4810 flags.go:64] FLAG: --anonymous-auth="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193071 4810 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193077 4810 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193082 4810 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193087 4810 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193092 4810 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193097 4810 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193101 4810 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193105 4810 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193110 4810 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193114 4810 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193120 4810 flags.go:64] FLAG: --cgroup-root="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193124 4810 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193129 4810 flags.go:64] FLAG: --client-ca-file="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193133 4810 flags.go:64] FLAG: --cloud-config="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193137 4810 flags.go:64] FLAG: --cloud-provider="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193141 4810 flags.go:64] FLAG: --cluster-dns="[]" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193148 4810 flags.go:64] FLAG: --cluster-domain="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193152 4810 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193156 4810 flags.go:64] FLAG: 
--config-dir="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193161 4810 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193166 4810 flags.go:64] FLAG: --container-log-max-files="5" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193172 4810 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193176 4810 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193180 4810 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193185 4810 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193194 4810 flags.go:64] FLAG: --contention-profiling="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193199 4810 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193202 4810 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193207 4810 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193210 4810 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193217 4810 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193221 4810 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193225 4810 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193229 4810 flags.go:64] FLAG: --enable-load-reader="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193233 4810 flags.go:64] FLAG: --enable-server="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193237 4810 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193245 4810 flags.go:64] FLAG: --event-burst="100" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193249 4810 flags.go:64] FLAG: --event-qps="50" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193253 4810 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193257 4810 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193261 4810 flags.go:64] FLAG: --eviction-hard="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193266 4810 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193270 4810 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193274 4810 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193278 4810 flags.go:64] FLAG: --eviction-soft="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193282 4810 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193286 4810 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193290 4810 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 03 05:41:16 crc 
kubenswrapper[4810]: I1203 05:41:16.193294 4810 flags.go:64] FLAG: --experimental-mounter-path="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193298 4810 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193302 4810 flags.go:64] FLAG: --fail-swap-on="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193307 4810 flags.go:64] FLAG: --feature-gates="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193313 4810 flags.go:64] FLAG: --file-check-frequency="20s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193317 4810 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193321 4810 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193325 4810 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193329 4810 flags.go:64] FLAG: --healthz-port="10248" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193334 4810 flags.go:64] FLAG: --help="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193338 4810 flags.go:64] FLAG: --hostname-override="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193342 4810 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193346 4810 flags.go:64] FLAG: --http-check-frequency="20s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193368 4810 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193373 4810 flags.go:64] FLAG: --image-credential-provider-config="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193377 4810 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193381 4810 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193386 4810 flags.go:64] FLAG: --image-service-endpoint="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193389 4810 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193394 4810 flags.go:64] FLAG: --kube-api-burst="100" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193398 4810 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193402 4810 flags.go:64] FLAG: --kube-api-qps="50" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193406 4810 flags.go:64] FLAG: --kube-reserved="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193410 4810 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193414 4810 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193418 4810 flags.go:64] FLAG: --kubelet-cgroups="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193422 4810 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193425 4810 flags.go:64] FLAG: --lock-file="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193429 4810 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193433 4810 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193438 4810 flags.go:64] 
FLAG: --log-json-info-buffer-size="0" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193444 4810 flags.go:64] FLAG: --log-json-split-stream="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193448 4810 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193452 4810 flags.go:64] FLAG: --log-text-split-stream="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193457 4810 flags.go:64] FLAG: --logging-format="text" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193461 4810 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193465 4810 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193469 4810 flags.go:64] FLAG: --manifest-url="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193473 4810 flags.go:64] FLAG: --manifest-url-header="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193479 4810 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193483 4810 flags.go:64] FLAG: --max-open-files="1000000" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193488 4810 flags.go:64] FLAG: --max-pods="110" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193492 4810 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193496 4810 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193500 4810 flags.go:64] FLAG: --memory-manager-policy="None" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193504 4810 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193508 4810 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193512 4810 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193516 4810 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193532 4810 flags.go:64] FLAG: --node-status-max-images="50" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193536 4810 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193540 4810 flags.go:64] FLAG: --oom-score-adj="-999" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193544 4810 flags.go:64] FLAG: --pod-cidr="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193548 4810 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193555 4810 flags.go:64] FLAG: --pod-manifest-path="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193558 4810 flags.go:64] FLAG: --pod-max-pids="-1" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193563 4810 flags.go:64] FLAG: --pods-per-core="0" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193566 4810 flags.go:64] FLAG: --port="10250" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193570 4810 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 
05:41:16.193575 4810 flags.go:64] FLAG: --provider-id="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193579 4810 flags.go:64] FLAG: --qos-reserved="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193583 4810 flags.go:64] FLAG: --read-only-port="10255" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193587 4810 flags.go:64] FLAG: --register-node="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193591 4810 flags.go:64] FLAG: --register-schedulable="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193595 4810 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193603 4810 flags.go:64] FLAG: --registry-burst="10" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193607 4810 flags.go:64] FLAG: --registry-qps="5" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193612 4810 flags.go:64] FLAG: --reserved-cpus="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193616 4810 flags.go:64] FLAG: --reserved-memory="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193622 4810 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193626 4810 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193630 4810 flags.go:64] FLAG: --rotate-certificates="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193634 4810 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193639 4810 flags.go:64] FLAG: --runonce="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193643 4810 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193647 4810 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193651 4810 flags.go:64] FLAG: --seccomp-default="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193655 4810 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193659 4810 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193663 4810 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193667 4810 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193671 4810 flags.go:64] FLAG: --storage-driver-password="root" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193675 4810 flags.go:64] FLAG: --storage-driver-secure="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193679 4810 flags.go:64] FLAG: --storage-driver-table="stats" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193683 4810 flags.go:64] FLAG: --storage-driver-user="root" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193692 4810 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193696 4810 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193700 4810 flags.go:64] FLAG: --system-cgroups="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193704 4810 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 
05:41:16.193713 4810 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193717 4810 flags.go:64] FLAG: --tls-cert-file="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193721 4810 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193756 4810 flags.go:64] FLAG: --tls-min-version="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193760 4810 flags.go:64] FLAG: --tls-private-key-file="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193765 4810 flags.go:64] FLAG: --topology-manager-policy="none" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193769 4810 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193773 4810 flags.go:64] FLAG: --topology-manager-scope="container" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193777 4810 flags.go:64] FLAG: --v="2" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193783 4810 flags.go:64] FLAG: --version="false" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193788 4810 flags.go:64] FLAG: --vmodule="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193793 4810 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.193798 4810 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193921 4810 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193926 4810 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193930 4810 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193934 4810 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193938 4810 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193941 4810 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193945 4810 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193949 4810 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193952 4810 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193957 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193960 4810 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193964 4810 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193968 4810 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193972 4810 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193975 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 05:41:16 crc 
kubenswrapper[4810]: W1203 05:41:16.193979 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193982 4810 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193986 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193989 4810 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.193997 4810 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194001 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194005 4810 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194008 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194012 4810 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194015 4810 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194019 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194023 4810 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194026 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194030 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194034 4810 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194039 4810 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194044 4810 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194048 4810 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194052 4810 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194055 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194063 4810 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194066 4810 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194070 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194073 4810 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194077 4810 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194081 4810 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194086 4810 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194091 4810 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194095 4810 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194099 4810 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194103 4810 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194106 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194110 4810 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194114 4810 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194118 4810 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194122 4810 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194125 4810 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194129 4810 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194133 4810 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194136 4810 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194342 4810 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194348 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194351 4810 
feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194355 4810 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194358 4810 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194362 4810 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194365 4810 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194369 4810 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194372 4810 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194376 4810 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194379 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194383 4810 feature_gate.go:330] unrecognized feature gate: Example Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194389 4810 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194393 4810 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194398 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.194401 4810 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.194407 4810 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.209999 4810 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.210056 4810 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210213 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210240 4810 feature_gate.go:330] unrecognized feature gate: Example Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210254 4810 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210266 4810 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210278 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210290 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210300 
4810 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210312 4810 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210324 4810 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210335 4810 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210344 4810 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210353 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210362 4810 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210372 4810 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210380 4810 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210389 4810 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210397 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210405 4810 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210414 4810 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210422 4810 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210432 4810 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210441 4810 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210449 4810 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210457 4810 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210466 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210475 4810 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210486 4810 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210499 4810 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210509 4810 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210518 4810 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210527 4810 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210536 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210553 4810 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210564 4810 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210576 4810 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210587 4810 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210596 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210605 4810 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210614 4810 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210624 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210633 4810 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210642 4810 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210653 4810 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210661 4810 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210673 4810 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210692 4810 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210712 4810 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210725 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210779 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210793 4810 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210802 4810 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210812 4810 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210820 4810 
feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210829 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210837 4810 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210847 4810 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210855 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210864 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210873 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210883 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210891 4810 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210903 4810 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210912 4810 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210921 4810 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210930 4810 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210939 4810 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210948 4810 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210957 4810 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210969 4810 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210980 4810 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.210991 4810 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.211007 4810 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211274 4810 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211290 4810 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211300 4810 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211309 4810 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211319 4810 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211328 4810 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211337 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211347 4810 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211355 4810 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211364 4810 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211376 4810 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211432 4810 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211445 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211456 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211467 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211478 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211488 4810 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211496 4810 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211506 4810 feature_gate.go:330] unrecognized feature gate: Example Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211515 4810 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211524 4810 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211533 4810 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211543 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211552 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211561 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211571 4810 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211579 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211588 4810 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211597 4810 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211605 4810 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211617 4810 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211629 4810 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211639 4810 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211648 4810 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211657 4810 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211667 4810 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211677 4810 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211686 4810 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211695 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211704 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211715 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211725 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211762 4810 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211772 4810 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211781 4810 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211789 4810 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211798 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211809 4810 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211820 4810 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211828 4810 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211838 4810 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211847 4810 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211856 4810 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211865 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211874 4810 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211883 4810 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211891 4810 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211900 4810 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211909 4810 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211918 4810 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211929 4810 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211940 4810 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211950 4810 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211959 4810 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211969 4810 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211980 4810 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211989 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.211998 4810 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.212007 4810 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.212016 4810 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.212025 4810 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.212039 4810 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.212383 4810 server.go:940] "Client rotation is on, will bootstrap in background" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.218435 4810 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.218644 4810 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
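The certificate manager entries just below report the client certificate's expiration and a rotation deadline that has already passed, which is why rotation starts immediately (and then fails while the API server at api-int.crc.testing:6443 is still unreachable). The sketch below shows one way to derive such a deadline from the PEM file loaded above, assuming the common heuristic of rotating at a jittered 70-90% of the certificate lifetime; the kubelet's exact jitter policy is internal to its certificate manager and not visible in this log.

```go
// Hypothetical sketch: derive a rotation deadline from the client certificate
// loaded from kubelet-client-current.pem. The 70-90% jitter window is an
// assumption for illustration, not the kubelet's exact policy.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"math/rand"
	"os"
	"time"
)

// rotationDeadline picks a point between 70% and 90% of the certificate's
// lifetime after NotBefore, so rotation is attempted well before expiry.
func rotationDeadline(cert *x509.Certificate) time.Time {
	lifetime := cert.NotAfter.Sub(cert.NotBefore)
	fraction := 0.7 + 0.2*rand.Float64()
	return cert.NotBefore.Add(time.Duration(float64(lifetime) * fraction))
}

func main() {
	pemBytes, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-client-current.pem")
	if err != nil {
		panic(err)
	}
	// The file holds certificate and key blocks; scan for the certificate.
	var cert *x509.Certificate
	for rest := pemBytes; ; {
		var block *pem.Block
		block, rest = pem.Decode(rest)
		if block == nil {
			break
		}
		if block.Type == "CERTIFICATE" {
			if cert, err = x509.ParseCertificate(block.Bytes); err != nil {
				panic(err)
			}
			break
		}
	}
	if cert == nil {
		panic("no CERTIFICATE block found")
	}
	deadline := rotationDeadline(cert)
	fmt.Printf("expires %s, rotation deadline %s, rotation due now: %t\n",
		cert.NotAfter, deadline, time.Now().After(deadline))
}
```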
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.219870 4810 server.go:997] "Starting client certificate rotation"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.219934 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.220330 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-21 23:37:12.895610752 +0000 UTC
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.220496 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.227296 4810 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.229431 4810 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.231672 4810 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.242699 4810 log.go:25] "Validated CRI v1 runtime API"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.261098 4810 log.go:25] "Validated CRI v1 image API"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.263190 4810 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.266140 4810 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-03-05-36-20-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.266193 4810 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.289845 4810 manager.go:217] Machine: {Timestamp:2025-12-03 05:41:16.287930456 +0000 UTC m=+0.223391337 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:246a1c9c-2777-4e36-9872-3cdc4a9802f0 BootID:827cfc94-e475-4138-b039-a83e4376049e Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4
Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:87:e5:2c Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:87:e5:2c Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:56:c5:30 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:9e:19:5d Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:47:2e:92 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:b5:fa:d8 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:da:0f:c2:ce:50:e0 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:06:24:ff:d1:33:75 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.290208 4810 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.290389 4810 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.290917 4810 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.291134 4810 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.291184 4810 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.291585 4810 topology_manager.go:138] "Creating topology manager with none policy"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.291604 4810 container_manager_linux.go:303] "Creating device plugin manager"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.291911 4810 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.291954 4810 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.292198 4810 state_mem.go:36] "Initialized new in-memory state store"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.292504 4810 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.293299 4810 kubelet.go:418] "Attempting to sync node with API server"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.293330 4810 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.293364 4810 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.293383 4810 kubelet.go:324] "Adding apiserver pod source"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.293402 4810 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.295864 4810 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.296029 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.296042 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused
Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.296144 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError"
Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.296163 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.296402 4810 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.297347 4810 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298052 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298089 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298100 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298112 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298130 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298142 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298153 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298171 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298187 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298204 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298244 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298254 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.298476 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.299015 4810 server.go:1280] "Started kubelet"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.299688 4810 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.300110 4810 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.300101 4810 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 03 05:41:16 crc systemd[1]: Started Kubernetes Kubelet.
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.301373 4810 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.302449 4810 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.302506 4810 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.302780 4810 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 17:35:56.268120009 +0000 UTC
Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.302376 4810 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.23:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d9e1cfa8dd480 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 05:41:16.298974336 +0000 UTC m=+0.234435187,LastTimestamp:2025-12-03 05:41:16.298974336 +0000 UTC m=+0.234435187,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.303000 4810 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.302919 4810 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 971h54m39.965210356s for next certificate rotation
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.302828 4810 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.303123 4810 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.303550 4810 server.go:460] "Adding debug handlers to kubelet server"
Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.302957 4810 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.305832 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="200ms"
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.306631 4810 factory.go:55] Registering systemd factory
Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.306692 4810 factory.go:221] Registration of the systemd container factory successfully
Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.308789 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused
Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.309154 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver:
failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.309433 4810 factory.go:153] Registering CRI-O factory Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.309483 4810 factory.go:221] Registration of the crio container factory successfully Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.309611 4810 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.309676 4810 factory.go:103] Registering Raw factory Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.309709 4810 manager.go:1196] Started watching for new ooms in manager Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.312239 4810 manager.go:319] Starting recovery of all containers Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321622 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321673 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321685 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321695 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321705 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321715 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321782 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321823 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321836 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321847 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321858 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321869 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321882 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321895 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321905 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321916 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321926 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321937 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321949 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321985 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.321996 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322007 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322020 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322031 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322044 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322278 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322289 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322303 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322314 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322324 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322334 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322343 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322353 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322363 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322372 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322381 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322390 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322399 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322409 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322417 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322425 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322434 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322444 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322453 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322464 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322473 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322481 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322491 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322501 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322511 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322520 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322532 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322553 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322563 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322575 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322585 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322595 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322605 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322615 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.322625 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324441 4810 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324496 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324524 4810 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324545 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324567 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324589 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324608 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324627 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324648 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324667 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324689 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324709 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324728 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324776 4810 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324796 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324815 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324835 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324854 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324874 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324892 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324910 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324931 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324950 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324970 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.324992 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325010 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325031 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325050 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325070 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325089 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325109 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325129 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325148 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325167 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325195 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325215 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325233 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325253 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325274 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325295 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325315 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325334 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325358 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325377 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325396 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325424 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325446 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325467 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325488 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325509 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325531 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325552 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325572 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325593 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325613 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325648 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325669 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325689 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325710 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325755 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325775 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325793 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325812 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325830 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325849 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325869 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325887 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325906 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325924 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325942 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325962 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.325982 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326001 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326021 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326044 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326061 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326079 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326097 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326117 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326136 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326155 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326173 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326191 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326210 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326231 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326250 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326268 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326286 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326305 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326325 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326354 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326383 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326401 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326420 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326441 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326459 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326476 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326494 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326515 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326533 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326552 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326571 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326588 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326606 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326624 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326644 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326666 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326696 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326773 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326802 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326825 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326854 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326879 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326907 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326932 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326954 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326972 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.326995 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327014 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327040 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327059 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327077 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327096 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327115 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327133 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327152 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327170 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327188 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327205 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327222 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327241 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327259 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327278 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327297 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327314 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327332 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327351 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327368 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327387 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327405 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327423 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327442 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327460 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327477 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327521 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327539 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" 
volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327558 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327578 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327597 4810 reconstruct.go:97] "Volume reconstruction finished" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.327610 4810 reconciler.go:26] "Reconciler: start to sync state" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.341674 4810 manager.go:324] Recovery completed Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.354019 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.360887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.360960 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.360978 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.371348 4810 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.371392 4810 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.371424 4810 state_mem.go:36] "Initialized new in-memory state store" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.373966 4810 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.376014 4810 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.376078 4810 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.376123 4810 kubelet.go:2335] "Starting kubelet main sync loop" Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.376294 4810 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.379793 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.379885 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.384683 4810 policy_none.go:49] "None policy: Start" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.385788 4810 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.385822 4810 state_mem.go:35] "Initializing new in-memory state store" Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.404898 4810 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.432847 4810 manager.go:334] "Starting Device Plugin manager" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.432894 4810 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.432905 4810 server.go:79] "Starting device plugin registration server" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.433619 4810 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.433633 4810 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.433829 4810 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.433899 4810 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.433906 4810 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.444572 4810 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.476481 4810 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 05:41:16 crc kubenswrapper[4810]: 
I1203 05:41:16.476629 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.478080 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.478123 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.478135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.478297 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.478770 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.478852 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.479217 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.479276 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.479305 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.479480 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.479722 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.479784 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.479990 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.480028 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.480039 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.480817 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.480852 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.480862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.480999 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.481044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.481087 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.481105 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.481113 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.481134 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.482872 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.482893 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.482901 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.482933 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.482948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.482959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.483083 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.483254 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.483307 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.484102 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.484140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.484157 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.484317 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.484343 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.484351 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.484380 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.484419 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.485270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.485295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.485304 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.507307 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="400ms" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.531881 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.531948 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532002 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532049 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532084 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532113 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532144 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532215 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532268 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532306 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532339 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532368 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532397 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532429 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.532462 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.533883 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.535000 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.535060 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.535082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.535123 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.535596 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.23:6443: connect: connection refused" node="crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.633788 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634102 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634215 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634138 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634290 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634322 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634364 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634406 4810 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634529 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634565 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634615 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634647 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634654 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634789 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634790 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634843 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634874 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634886 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634685 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634939 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634968 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.634982 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.635018 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.635060 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.635073 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.635107 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.635025 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.635126 4810 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.635167 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.635317 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.735745 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.737558 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.737611 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.737624 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.737655 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.738016 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.23:6443: connect: connection refused" node="crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.817399 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.825334 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.842612 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.854865 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.859772 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-cef52bf56a7a2057f7f07d551ed8f89c13ff058919341d5ca74f93d4a8a25e9f WatchSource:0}: Error finding container cef52bf56a7a2057f7f07d551ed8f89c13ff058919341d5ca74f93d4a8a25e9f: Status 404 returned error can't find the container with id cef52bf56a7a2057f7f07d551ed8f89c13ff058919341d5ca74f93d4a8a25e9f Dec 03 05:41:16 crc kubenswrapper[4810]: I1203 05:41:16.863494 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.870407 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-d04072289338ff68c535db7925c9c80ab14c43a47de0f6682f6f2503ae721905 WatchSource:0}: Error finding container d04072289338ff68c535db7925c9c80ab14c43a47de0f6682f6f2503ae721905: Status 404 returned error can't find the container with id d04072289338ff68c535db7925c9c80ab14c43a47de0f6682f6f2503ae721905 Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.877229 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-6c108cd0020541b2326c7d5b49cde28bb41cce40212d72f921115e2edbd94630 WatchSource:0}: Error finding container 6c108cd0020541b2326c7d5b49cde28bb41cce40212d72f921115e2edbd94630: Status 404 returned error can't find the container with id 6c108cd0020541b2326c7d5b49cde28bb41cce40212d72f921115e2edbd94630 Dec 03 05:41:16 crc kubenswrapper[4810]: W1203 05:41:16.888577 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-b8084c19c40f5546ab3b9c21dbc0fa17b71ab8a0c74d99c1b83115daf39a4c9a WatchSource:0}: Error finding container b8084c19c40f5546ab3b9c21dbc0fa17b71ab8a0c74d99c1b83115daf39a4c9a: Status 404 returned error can't find the container with id b8084c19c40f5546ab3b9c21dbc0fa17b71ab8a0c74d99c1b83115daf39a4c9a Dec 03 05:41:16 crc kubenswrapper[4810]: E1203 05:41:16.907964 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="800ms" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.138148 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.139440 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.139486 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.139497 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.139523 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 05:41:17 crc 
kubenswrapper[4810]: E1203 05:41:17.140119 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.23:6443: connect: connection refused" node="crc" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.301460 4810 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.386166 4810 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075" exitCode=0 Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.386242 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.386797 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6c108cd0020541b2326c7d5b49cde28bb41cce40212d72f921115e2edbd94630"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.387055 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.388281 4810 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="e5a087b6a7a2fa4131f93c65787b04662e70c7329dc26380dba9a8175623f264" exitCode=0 Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.388359 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"e5a087b6a7a2fa4131f93c65787b04662e70c7329dc26380dba9a8175623f264"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.388382 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d04072289338ff68c535db7925c9c80ab14c43a47de0f6682f6f2503ae721905"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.388455 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.388883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.388958 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.389144 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.389201 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.389215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.388984 
4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.392718 4810 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93" exitCode=0 Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.392883 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.392948 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3980cfe5ebc8b85cba81bd3eb40ce37fa2cb3dec38d21b4d5a311a28303bdc5d"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.393139 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.395066 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.395126 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.395148 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.395538 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.395562 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cef52bf56a7a2057f7f07d551ed8f89c13ff058919341d5ca74f93d4a8a25e9f"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.397821 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218" exitCode=0 Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.397862 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.397926 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b8084c19c40f5546ab3b9c21dbc0fa17b71ab8a0c74d99c1b83115daf39a4c9a"} Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.398011 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.398758 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.398789 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.398800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.400835 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.402475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.402505 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.402514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:17 crc kubenswrapper[4810]: W1203 05:41:17.430860 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused Dec 03 05:41:17 crc kubenswrapper[4810]: E1203 05:41:17.431081 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError" Dec 03 05:41:17 crc kubenswrapper[4810]: W1203 05:41:17.525168 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused Dec 03 05:41:17 crc kubenswrapper[4810]: E1203 05:41:17.525288 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError" Dec 03 05:41:17 crc kubenswrapper[4810]: E1203 05:41:17.709522 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="1.6s" Dec 03 05:41:17 crc kubenswrapper[4810]: W1203 05:41:17.727952 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused Dec 03 05:41:17 crc kubenswrapper[4810]: E1203 05:41:17.728043 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError" 
Dec 03 05:41:17 crc kubenswrapper[4810]: W1203 05:41:17.738196 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.23:6443: connect: connection refused Dec 03 05:41:17 crc kubenswrapper[4810]: E1203 05:41:17.738314 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.23:6443: connect: connection refused" logger="UnhandledError" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.941557 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.943308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.943356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.943371 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:17 crc kubenswrapper[4810]: I1203 05:41:17.943406 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.275585 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.403003 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.403051 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.403064 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.403076 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.403088 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.403210 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.405192 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.405225 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.405237 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.405695 4810 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58" exitCode=0 Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.405788 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.406169 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.407643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.407671 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.407681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.407885 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d199f42e09199f0d41c5f97d8a32674653aaceaaef15a92667f73bee51f1f8b5"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.407981 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.408795 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.408819 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.408826 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.413065 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.413092 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.413103 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.413188 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.414162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.414185 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.414196 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.420346 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.420376 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.420388 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61"} Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.420457 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.421587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.421609 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:18 crc kubenswrapper[4810]: I1203 05:41:18.421617 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.388525 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.414108 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.427239 4810 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9" exitCode=0 Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.427291 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9"} Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.427400 4810 kubelet_node_status.go:401] "Setting 
node annotation to enable volume controller attach/detach" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.427458 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.427514 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.427577 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.427680 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.428827 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.428882 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.428936 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.428962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.428975 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.428989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.429596 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.429664 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.429683 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.430277 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.430354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.430374 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:19 crc kubenswrapper[4810]: I1203 05:41:19.925488 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.435567 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5"} Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.435657 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133"} Dec 03 05:41:20 crc 
kubenswrapper[4810]: I1203 05:41:20.435679 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.435773 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.435818 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.435686 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f"} Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.437310 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.437352 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.437363 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.437603 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.437652 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:20 crc kubenswrapper[4810]: I1203 05:41:20.437764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:21 crc kubenswrapper[4810]: I1203 05:41:21.488992 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8"} Dec 03 05:41:21 crc kubenswrapper[4810]: I1203 05:41:21.489060 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919"} Dec 03 05:41:21 crc kubenswrapper[4810]: I1203 05:41:21.489182 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:21 crc kubenswrapper[4810]: I1203 05:41:21.490513 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:21 crc kubenswrapper[4810]: I1203 05:41:21.490549 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:21 crc kubenswrapper[4810]: I1203 05:41:21.490562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.182638 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.182837 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.184056 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.184112 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.184128 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.493676 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.495691 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.495777 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:22 crc kubenswrapper[4810]: I1203 05:41:22.495799 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:23 crc kubenswrapper[4810]: I1203 05:41:23.162379 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:23 crc kubenswrapper[4810]: I1203 05:41:23.162662 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:23 crc kubenswrapper[4810]: I1203 05:41:23.164407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:23 crc kubenswrapper[4810]: I1203 05:41:23.164457 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:23 crc kubenswrapper[4810]: I1203 05:41:23.164474 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.109778 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.110072 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.112166 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.112255 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.112275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.598087 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.598317 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.599870 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.599944 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 
05:41:24.599962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:24 crc kubenswrapper[4810]: I1203 05:41:24.631976 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.334042 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.334414 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.336295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.336361 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.336381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.342103 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.503945 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.504027 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.503953 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.505450 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.505490 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.505501 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.505847 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.505915 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:25 crc kubenswrapper[4810]: I1203 05:41:25.505946 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:26 crc kubenswrapper[4810]: E1203 05:41:26.444799 4810 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 05:41:26 crc kubenswrapper[4810]: I1203 05:41:26.535864 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:26 crc kubenswrapper[4810]: I1203 05:41:26.536858 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:26 crc kubenswrapper[4810]: I1203 05:41:26.536900 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:26 crc kubenswrapper[4810]: I1203 05:41:26.536918 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:27 crc kubenswrapper[4810]: I1203 05:41:27.110129 4810 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 05:41:27 crc kubenswrapper[4810]: I1203 05:41:27.110250 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 05:41:27 crc kubenswrapper[4810]: I1203 05:41:27.587480 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:27 crc kubenswrapper[4810]: I1203 05:41:27.587633 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:27 crc kubenswrapper[4810]: I1203 05:41:27.588904 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:27 crc kubenswrapper[4810]: I1203 05:41:27.588989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:27 crc kubenswrapper[4810]: I1203 05:41:27.589010 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:27 crc kubenswrapper[4810]: E1203 05:41:27.945416 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 03 05:41:28 crc kubenswrapper[4810]: E1203 05:41:28.277186 4810 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 05:41:28 crc kubenswrapper[4810]: I1203 05:41:28.302496 4810 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 03 05:41:29 crc kubenswrapper[4810]: W1203 05:41:29.056263 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.058402 4810 trace.go:236] Trace[54473367]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 05:41:19.054) (total time: 10001ms): Dec 03 05:41:29 crc kubenswrapper[4810]: Trace[54473367]: ---"Objects listed" error:Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (05:41:29.056) Dec 03 05:41:29 crc kubenswrapper[4810]: Trace[54473367]: [10.0015547s] [10.0015547s] END Dec 03 05:41:29 crc kubenswrapper[4810]: E1203 05:41:29.058481 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 05:41:29 crc kubenswrapper[4810]: E1203 05:41:29.311287 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 03 05:41:29 crc kubenswrapper[4810]: W1203 05:41:29.489140 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.489346 4810 trace.go:236] Trace[524661808]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 05:41:19.486) (total time: 10002ms): Dec 03 05:41:29 crc kubenswrapper[4810]: Trace[524661808]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (05:41:29.489) Dec 03 05:41:29 crc kubenswrapper[4810]: Trace[524661808]: [10.002588665s] [10.002588665s] END Dec 03 05:41:29 crc kubenswrapper[4810]: E1203 05:41:29.489390 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.546523 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.548154 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.548231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.548249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.548289 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.662567 4810 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 05:41:29 crc 
kubenswrapper[4810]: I1203 05:41:29.662655 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.670506 4810 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 05:41:29 crc kubenswrapper[4810]: I1203 05:41:29.670625 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 05:41:32 crc kubenswrapper[4810]: I1203 05:41:32.474470 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 05:41:32 crc kubenswrapper[4810]: I1203 05:41:32.497056 4810 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 03 05:41:32 crc kubenswrapper[4810]: I1203 05:41:32.594533 4810 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.173417 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.173699 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.175404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.175455 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.175471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.181801 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.481858 4810 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.555707 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.555873 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.557266 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.557354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:33 crc kubenswrapper[4810]: I1203 05:41:33.557385 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.644176 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.644436 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.646425 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.646488 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.646505 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.664500 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 03 05:41:34 crc kubenswrapper[4810]: E1203 05:41:34.678154 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.679561 4810 trace.go:236] Trace[228777209]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 05:41:20.886) (total time: 13792ms): Dec 03 05:41:34 crc kubenswrapper[4810]: Trace[228777209]: ---"Objects listed" error: 13792ms (05:41:34.679) Dec 03 05:41:34 crc kubenswrapper[4810]: Trace[228777209]: [13.792688487s] [13.792688487s] END Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.679599 4810 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.680125 4810 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.682961 4810 trace.go:236] Trace[655192479]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 05:41:19.747) (total time: 14935ms): Dec 03 05:41:34 crc kubenswrapper[4810]: Trace[655192479]: ---"Objects listed" error: 14935ms (05:41:34.682) Dec 03 05:41:34 crc kubenswrapper[4810]: Trace[655192479]: [14.935820251s] [14.935820251s] END Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.682986 4810 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.698933 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.703620 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.719485 4810 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:53218->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.719596 4810 
prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:53218->192.168.126.11:17697: read: connection reset by peer" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.720082 4810 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.720148 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.812697 4810 csr.go:261] certificate signing request csr-hs58b is approved, waiting to be issued Dec 03 05:41:34 crc kubenswrapper[4810]: I1203 05:41:34.818845 4810 csr.go:257] certificate signing request csr-hs58b is issued Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.304581 4810 apiserver.go:52] "Watching apiserver" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.307816 4810 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.308183 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.308753 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.308819 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.308828 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.308952 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.309032 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.309051 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.309093 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.309239 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.309344 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.311169 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.311785 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.311823 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.312109 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.312108 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.312109 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.312227 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.312238 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.314325 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.394859 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.403796 4810 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.408693 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.425844 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.438119 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.450406 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.470666 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.485216 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486618 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486681 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486722 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486788 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486829 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486866 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486900 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486940 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.486979 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487445 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487441 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487497 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487534 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487576 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487602 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487623 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487628 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487711 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487789 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487838 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.487981 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488032 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488079 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488120 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488138 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488159 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488151 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488201 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488248 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488258 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488286 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488291 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488324 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488352 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488324 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488437 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488482 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488534 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488672 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488713 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488779 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488823 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488858 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488896 
4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488933 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488973 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489010 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489047 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489084 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489130 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489170 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489205 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489283 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 05:41:35 crc 
kubenswrapper[4810]: I1203 05:41:35.489320 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489363 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489398 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489505 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488486 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488569 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488703 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488766 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488909 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.488926 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489087 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489135 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489171 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489518 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489534 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489663 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489823 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489867 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489982 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489972 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490005 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.489543 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490076 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490082 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490110 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490137 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490162 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490185 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490204 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490221 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490243 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490263 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490273 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490283 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490304 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490324 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490344 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490362 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490379 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490399 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490418 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490436 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490458 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: 
\"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490478 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490497 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490517 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490534 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490552 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490572 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490592 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490608 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490625 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490647 4810 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490665 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490684 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490704 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490722 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490758 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490779 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490799 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490817 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490835 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490855 4810 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490873 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490907 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490924 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490944 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490960 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490977 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490995 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491013 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491032 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491051 4810 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491092 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491136 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491156 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491176 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491196 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491216 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491235 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491254 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491272 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 05:41:35 crc 
kubenswrapper[4810]: I1203 05:41:35.491291 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491311 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491329 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491350 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491370 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491388 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491404 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491422 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491440 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491459 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: 
\"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491975 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492001 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492019 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492037 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492055 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492078 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492096 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492115 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492136 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492155 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492174 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492189 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492207 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492222 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492239 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492255 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492272 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492290 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492829 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492868 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492895 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.492922 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493287 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493322 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493341 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493360 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493380 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493456 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493477 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493495 4810 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493514 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493535 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493892 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493917 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493943 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493969 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494139 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494169 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494191 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494211 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494233 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494254 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494275 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494297 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494317 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494340 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494361 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494381 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494402 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 
05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494430 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494451 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494472 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494493 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494514 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494536 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494555 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494574 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494593 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494612 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 
05:41:35.494629 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494649 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494668 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494687 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494709 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494748 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494769 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494791 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494813 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494833 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 
05:41:35.494856 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494877 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494899 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494919 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494941 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494961 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494979 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494997 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495015 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495031 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 05:41:35 crc 
kubenswrapper[4810]: I1203 05:41:35.495086 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495129 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495164 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495192 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495213 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495237 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495261 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495280 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495301 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") 
pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495320 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495342 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495363 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495388 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495411 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495491 4810 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495506 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495519 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495530 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495541 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" 
(UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495552 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495562 4810 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495573 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495584 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495594 4810 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495606 4810 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495616 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495626 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495636 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495647 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495658 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495668 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495677 4810 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495688 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495699 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495710 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495719 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495748 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495758 4810 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495767 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495778 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495789 4810 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495799 4810 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495808 4810 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490616 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: 
"43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490772 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.490773 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491001 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491360 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.491708 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493372 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493385 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493348 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493426 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493668 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.493840 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494000 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494045 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494160 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494144 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494472 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494451 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494641 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494676 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494793 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.494908 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495112 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495158 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495326 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495359 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495631 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495773 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495913 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.495915 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.496093 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.496396 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.496422 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.496566 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.496569 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.496499 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.496614 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.496811 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.497076 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.497203 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.497401 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.497420 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.497702 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.497832 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.497941 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.498127 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.498193 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.506910 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.508475 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:41:36.008439327 +0000 UTC m=+19.943900198 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.509529 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.510010 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.510531 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.510556 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.510884 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.511382 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.511683 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512044 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512066 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512050 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512129 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512281 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512456 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512475 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512500 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.512786 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.513112 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.513427 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.513467 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.513563 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.513881 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.513893 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.514609 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.515127 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.515778 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.515782 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.516624 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.517204 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.517577 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.517680 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.517774 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:36.017753584 +0000 UTC m=+19.953214435 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.517998 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.518085 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.518134 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:36.018116562 +0000 UTC m=+19.953577413 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.518318 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.518581 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.518599 4810 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.518616 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.518935 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.519215 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.519893 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.520096 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.520105 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.521331 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.522379 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.523709 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.523945 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.524181 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.524246 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.524597 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.524827 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.526464 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.529200 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.533083 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.538098 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.538155 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.538220 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.538513 4810 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:36.038314624 +0000 UTC m=+19.973775475 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.539127 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.539309 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.539785 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.539874 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.540872 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.541384 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.542337 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.543502 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.543664 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.543752 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.544347 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.543677 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.544464 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:36.044426273 +0000 UTC m=+19.979887344 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.544697 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.544854 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.547554 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.552216 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.552293 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.552239 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.552808 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.552872 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.553329 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.553527 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.553439 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.553724 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.553803 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.553973 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.553988 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.554801 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.556009 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.556097 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.556123 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.556169 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.555925 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.556567 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.556820 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.556852 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.556974 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.557231 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.557399 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.557462 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.557613 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.558323 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.558384 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.558525 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.558604 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.558831 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.559016 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.559066 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.559086 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.559168 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.559260 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.559281 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.560109 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.560398 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.560608 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.560976 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.561144 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.561362 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.562526 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.562712 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). 
InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.562786 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.562816 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.563262 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.564657 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.564845 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.565390 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.566325 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.566380 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.566584 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.566628 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.566692 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.567092 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.567216 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.567364 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.567509 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.570780 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.571193 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.574493 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226" exitCode=255 Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.575767 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.575841 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226"} Dec 03 05:41:35 crc kubenswrapper[4810]: E1203 05:41:35.583000 4810 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.588634 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.589666 4810 scope.go:117] "RemoveContainer" containerID="1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.591934 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.597372 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.597842 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.597921 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.598085 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.598767 4810 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.598929 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc 
kubenswrapper[4810]: I1203 05:41:35.598950 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.598959 4810 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.598995 4810 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599012 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599030 4810 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599043 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599057 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599074 4810 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599087 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599101 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599113 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599125 4810 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599137 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599150 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599163 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599176 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599189 4810 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599202 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599215 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599228 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599240 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599253 4810 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599267 4810 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599280 4810 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599292 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599304 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599317 4810 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599329 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599341 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599360 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599372 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599384 4810 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599396 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599408 4810 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599420 4810 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599433 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599445 4810 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599457 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599469 4810 reconciler_common.go:293] "Volume detached for 
volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599481 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599493 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599505 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599517 4810 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599530 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599543 4810 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599555 4810 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599567 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599583 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599594 4810 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599606 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599617 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599631 4810 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599643 4810 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599656 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599668 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599679 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599690 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599702 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599714 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599726 4810 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599756 4810 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599770 4810 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599782 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599794 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599806 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599819 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599835 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599863 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599875 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599886 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599898 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599914 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599926 4810 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599938 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599950 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599962 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599974 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599985 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.599996 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600010 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600022 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600034 4810 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600046 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600063 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600076 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600089 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600100 4810 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600113 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600125 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600140 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600155 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600168 4810 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600180 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600195 4810 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600207 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600220 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600232 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600251 4810 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600263 4810 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600276 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600289 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600302 4810 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600315 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600327 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" 
(UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600340 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600353 4810 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600366 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600378 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600509 4810 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.600524 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.606968 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607047 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607066 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607097 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607112 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607130 4810 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607143 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607168 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607185 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607200 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607214 4810 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607234 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607249 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607264 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607277 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607296 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607310 4810 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607324 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607345 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607360 4810 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607375 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607388 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607407 4810 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607421 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607436 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607450 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607468 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607482 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607494 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607512 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607526 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607539 4810 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607553 4810 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607571 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607584 4810 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607598 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607612 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607630 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607644 4810 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607657 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607670 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607689 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607704 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607718 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607756 4810 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607770 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607783 4810 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607797 4810 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607815 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607829 4810 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607842 4810 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.607911 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.618817 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.622165 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.633024 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.637767 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.643379 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.658188 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.659919 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 05:41:35 crc kubenswrapper[4810]: W1203 05:41:35.665002 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-248f94c403e487cb4a07080fbc3428d5e64a6b2039580f62983ceff1308127f7 WatchSource:0}: Error finding container 248f94c403e487cb4a07080fbc3428d5e64a6b2039580f62983ceff1308127f7: Status 404 returned error can't find the container with id 248f94c403e487cb4a07080fbc3428d5e64a6b2039580f62983ceff1308127f7 Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.672592 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.674503 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.695892 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.696310 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.701646 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.710154 4810 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.710190 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.723880 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.806992 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-xqw2n"] Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.807313 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-xqw2n" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.812126 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-7jf2f"] Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.812854 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.813181 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.813348 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.813882 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.816423 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.816577 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.816687 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.816815 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.820571 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-03 05:36:34 +0000 UTC, rotation deadline is 2026-08-30 07:58:31.513172819 +0000 UTC Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.820621 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6482h16m55.692553506s for next certificate rotation Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.823779 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee12
20d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.842351 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.853110 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.860319 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 
03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.874503 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.888805 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.905833 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.911615 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-serviceca\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.911651 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-host\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.911668 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjmlf\" (UniqueName: \"kubernetes.io/projected/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-kube-api-access-sjmlf\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.911685 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dpph\" (UniqueName: \"kubernetes.io/projected/1f010726-9024-453c-abb9-a2bbd3ee2337-kube-api-access-7dpph\") pod \"node-resolver-xqw2n\" (UID: \"1f010726-9024-453c-abb9-a2bbd3ee2337\") " pod="openshift-dns/node-resolver-xqw2n" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.911723 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1f010726-9024-453c-abb9-a2bbd3ee2337-hosts-file\") pod \"node-resolver-xqw2n\" (UID: \"1f010726-9024-453c-abb9-a2bbd3ee2337\") " pod="openshift-dns/node-resolver-xqw2n" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.919137 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.937279 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.949470 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.965186 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:35 crc kubenswrapper[4810]: I1203 05:41:35.979231 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.001364 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.012543 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.012653 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1f010726-9024-453c-abb9-a2bbd3ee2337-hosts-file\") pod \"node-resolver-xqw2n\" (UID: \"1f010726-9024-453c-abb9-a2bbd3ee2337\") " pod="openshift-dns/node-resolver-xqw2n" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.012681 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-serviceca\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.012697 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-host\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.012718 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjmlf\" (UniqueName: \"kubernetes.io/projected/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-kube-api-access-sjmlf\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.012771 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dpph\" (UniqueName: \"kubernetes.io/projected/1f010726-9024-453c-abb9-a2bbd3ee2337-kube-api-access-7dpph\") pod \"node-resolver-xqw2n\" (UID: \"1f010726-9024-453c-abb9-a2bbd3ee2337\") " pod="openshift-dns/node-resolver-xqw2n" Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.013104 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:41:37.013088555 +0000 UTC m=+20.948549396 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.013152 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1f010726-9024-453c-abb9-a2bbd3ee2337-hosts-file\") pod \"node-resolver-xqw2n\" (UID: \"1f010726-9024-453c-abb9-a2bbd3ee2337\") " pod="openshift-dns/node-resolver-xqw2n" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.013807 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-host\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.014672 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-serviceca\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.015431 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.035417 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjmlf\" (UniqueName: \"kubernetes.io/projected/10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8-kube-api-access-sjmlf\") pod \"node-ca-7jf2f\" (UID: \"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\") " pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.035936 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.040344 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dpph\" (UniqueName: \"kubernetes.io/projected/1f010726-9024-453c-abb9-a2bbd3ee2337-kube-api-access-7dpph\") pod \"node-resolver-xqw2n\" (UID: \"1f010726-9024-453c-abb9-a2bbd3ee2337\") " pod="openshift-dns/node-resolver-xqw2n" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.048303 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.055591 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 
03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.068851 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.085532 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.096144 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.113471 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.113550 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.113606 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.113651 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.113748 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.113790 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.113804 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.113859 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.113877 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:37.113853347 +0000 UTC m=+21.049314188 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.113867 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.113946 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:37.113917099 +0000 UTC m=+21.049377980 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.113911 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.114019 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:37.11398628 +0000 UTC m=+21.049447321 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.114069 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.114096 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.114171 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:37.114147544 +0000 UTC m=+21.049608385 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.143361 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-xqw2n" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.147082 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"reso
urce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.150793 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-7jf2f" Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.153825 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f010726_9024_453c_abb9_a2bbd3ee2337.slice/crio-fc4d43a53dc784ab4d99435602404f5f1bd23982fbd87bd7f01106af33eb8929 WatchSource:0}: Error finding container fc4d43a53dc784ab4d99435602404f5f1bd23982fbd87bd7f01106af33eb8929: Status 404 returned error can't find the container with id fc4d43a53dc784ab4d99435602404f5f1bd23982fbd87bd7f01106af33eb8929 Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.224124 4810 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224313 4810 reflector.go:484] object-"openshift-network-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224350 4810 reflector.go:484] object-"openshift-network-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224372 4810 reflector.go:484] object-"openshift-network-node-identity"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224390 4810 reflector.go:484] 
object-"openshift-network-node-identity"/"network-node-identity-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-network-node-identity"/"network-node-identity-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224529 4810 reflector.go:484] object-"openshift-image-registry"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224551 4810 reflector.go:484] object-"openshift-image-registry"/"node-ca-dockercfg-4777p": watch of *v1.Secret ended with: very short watch: object-"openshift-image-registry"/"node-ca-dockercfg-4777p": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224570 4810 reflector.go:484] object-"openshift-image-registry"/"image-registry-certificates": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"image-registry-certificates": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224573 4810 reflector.go:484] object-"openshift-network-node-identity"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224614 4810 reflector.go:484] object-"openshift-dns"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224637 4810 reflector.go:484] object-"openshift-network-node-identity"/"ovnkube-identity-cm": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"ovnkube-identity-cm": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224655 4810 reflector.go:484] object-"openshift-network-operator"/"metrics-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-network-operator"/"metrics-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224657 4810 reflector.go:484] object-"openshift-network-node-identity"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224595 4810 reflector.go:484] object-"openshift-image-registry"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.224685 4810 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/events\": read tcp 
38.102.83.23:34312->38.102.83.23:6443: use of closed network connection" event="&Event{ObjectMeta:{node-ca-7jf2f.187d9e219b08603b openshift-image-registry 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-image-registry,Name:node-ca-7jf2f,UID:10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8,APIVersion:v1,ResourceVersion:26355,FieldPath:spec.containers{node-ca},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 05:41:36.171229243 +0000 UTC m=+20.106690084,LastTimestamp:2025-12-03 05:41:36.171229243 +0000 UTC m=+20.106690084,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224837 4810 reflector.go:484] object-"openshift-network-operator"/"iptables-alerter-script": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"iptables-alerter-script": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224861 4810 reflector.go:484] object-"openshift-dns"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.224880 4810 reflector.go:484] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": watch of *v1.Secret ended with: very short watch: object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": Unexpected watch close - watch lasted less than a second and no items received Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.376520 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.376549 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.376679 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:36 crc kubenswrapper[4810]: E1203 05:41:36.376795 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.380847 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.381864 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.383359 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.384130 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.385225 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.386045 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.388864 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.390137 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.390927 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.390999 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.392170 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.392789 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.395359 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.396001 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.396648 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.398092 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.398781 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.400096 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.400552 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" 
path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.401560 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.402771 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.403414 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.404898 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.405836 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.406669 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.407776 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.408520 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.408560 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.409608 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.410174 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.412415 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.413017 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.413973 4810 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.414079 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.415960 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.417627 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.418114 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.419607 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.420274 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.421426 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.422328 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.424168 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.425303 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.426177 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.428143 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.428960 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.430040 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.430694 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.432286 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.433231 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.434866 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.435032 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.435598 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.436215 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.437428 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.438179 4810 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.439922 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.462696 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] 
pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.479636 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.493937 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.511905 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.538176 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.555690 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"starte
d\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.579124 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.580714 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.581408 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.582752 4810 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-image-registry/node-ca-7jf2f" event={"ID":"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8","Type":"ContainerStarted","Data":"e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.582878 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-7jf2f" event={"ID":"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8","Type":"ContainerStarted","Data":"344eef81c27480ed9940f084875e54a0894939fe0eff711c1e0a5a3ff3331e7e"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.584029 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xqw2n" event={"ID":"1f010726-9024-453c-abb9-a2bbd3ee2337","Type":"ContainerStarted","Data":"86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.584070 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xqw2n" event={"ID":"1f010726-9024-453c-abb9-a2bbd3ee2337","Type":"ContainerStarted","Data":"fc4d43a53dc784ab4d99435602404f5f1bd23982fbd87bd7f01106af33eb8929"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.585903 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.586065 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.586183 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"30e4f1f7b29f9eece95fe49bfc2591da5e6a6c6e5d92a7c4fd38a7fc7eb24f5e"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.587038 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"44377830d6490e9731305b05df7776e3774b6f6f3a3e01d52876d676351f34ec"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.588607 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.588700 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"248f94c403e487cb4a07080fbc3428d5e64a6b2039580f62983ceff1308127f7"} Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.590276 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.605532 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.638183 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.644787 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-2hd85"] Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.645287 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.646365 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-4279f"] Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.646820 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.658357 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.658600 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.658798 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.659043 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.659276 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.659436 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.659903 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.660406 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.661013 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.664016 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.667532 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-64tlm"] Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.668325 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-qx2gg"] Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.668505 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.669030 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.676755 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.676960 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.677200 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.677454 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.677326 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.681119 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.681123 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.681156 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.685895 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.724368 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.770092 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.802490 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.823844 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828080 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-os-release\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828121 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-k8s-cni-cncf-io\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828153 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-cni-multus\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828178 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-node-log\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828193 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/885c296a-449b-4977-b6d9-396bc84d3cfa-ovn-node-metrics-cert\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828209 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-script-lib\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828232 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828249 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-socket-dir-parent\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828268 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-systemd\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828284 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-conf-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc 
kubenswrapper[4810]: I1203 05:41:36.828299 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cnibin\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828315 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828330 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-config\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828349 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwp8k\" (UniqueName: \"kubernetes.io/projected/885c296a-449b-4977-b6d9-396bc84d3cfa-kube-api-access-pwp8k\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828368 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-multus-certs\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828386 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bc7906ed-7d0a-444b-8e14-12c67bc3301e-rootfs\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828402 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-netns\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828419 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828475 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-env-overrides\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828494 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/61ac6c2e-df95-49c5-a959-0e061e9c5909-cni-binary-copy\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828512 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-kubelet\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828531 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bc7906ed-7d0a-444b-8e14-12c67bc3301e-proxy-tls\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828549 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-kubelet\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828563 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-log-socket\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828582 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z54p\" (UniqueName: \"kubernetes.io/projected/bc7906ed-7d0a-444b-8e14-12c67bc3301e-kube-api-access-4z54p\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828803 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-etc-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828819 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-cnibin\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828835 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cni-binary-copy\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828851 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828873 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-etc-kubernetes\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828892 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-os-release\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.828982 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-slash\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829029 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-system-cni-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829058 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-daemon-config\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829108 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-bin\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829131 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-hostroot\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc 
kubenswrapper[4810]: I1203 05:41:36.829171 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-systemd-units\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829192 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-cni-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829211 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-netns\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829229 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvdbj\" (UniqueName: \"kubernetes.io/projected/61ac6c2e-df95-49c5-a959-0e061e9c5909-kube-api-access-hvdbj\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829249 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5p56\" (UniqueName: \"kubernetes.io/projected/80d62d26-c895-4b0b-a5b7-252147ad2dc9-kube-api-access-b5p56\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829290 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-ovn-kubernetes\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829309 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-netd\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829325 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-cni-bin\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829347 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bc7906ed-7d0a-444b-8e14-12c67bc3301e-mcd-auth-proxy-config\") pod \"machine-config-daemon-2hd85\" (UID: 
\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829378 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-var-lib-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829395 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-ovn\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.829411 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-system-cni-dir\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.841504 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.855787 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.868806 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.889503 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedA
t\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.915058 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.930569 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-socket-dir-parent\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.930941 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.930771 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-socket-dir-parent\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931023 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-systemd\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931144 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-conf-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931175 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cnibin\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931198 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-config\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931216 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwp8k\" (UniqueName: \"kubernetes.io/projected/885c296a-449b-4977-b6d9-396bc84d3cfa-kube-api-access-pwp8k\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931237 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-kubelet\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931254 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-multus-certs\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931271 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bc7906ed-7d0a-444b-8e14-12c67bc3301e-rootfs\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931279 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cnibin\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931316 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-conf-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931386 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-kubelet\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931353 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-netns\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931294 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-netns\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931441 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931469 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-env-overrides\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931500 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/61ac6c2e-df95-49c5-a959-0e061e9c5909-cni-binary-copy\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931525 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bc7906ed-7d0a-444b-8e14-12c67bc3301e-proxy-tls\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931550 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-kubelet\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931549 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-multus-certs\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931571 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-log-socket\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931597 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z54p\" (UniqueName: 
\"kubernetes.io/projected/bc7906ed-7d0a-444b-8e14-12c67bc3301e-kube-api-access-4z54p\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931603 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bc7906ed-7d0a-444b-8e14-12c67bc3301e-rootfs\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931628 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-etc-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931656 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-cnibin\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931677 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cni-binary-copy\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931699 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931724 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-daemon-config\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931774 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-etc-kubernetes\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931797 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-os-release\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931831 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-slash\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931876 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-system-cni-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931904 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-bin\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931922 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931962 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-hostroot\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.931927 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-hostroot\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932001 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-kubelet\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932016 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-systemd-units\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932028 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-config\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932037 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-cni-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" 
Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932046 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932087 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-systemd-units\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932087 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-netns\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932055 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-netns\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932130 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-netd\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932146 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvdbj\" (UniqueName: \"kubernetes.io/projected/61ac6c2e-df95-49c5-a959-0e061e9c5909-kube-api-access-hvdbj\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932167 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5p56\" (UniqueName: \"kubernetes.io/projected/80d62d26-c895-4b0b-a5b7-252147ad2dc9-kube-api-access-b5p56\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932204 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-ovn-kubernetes\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932225 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-ovn\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932230 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-cni-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932242 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-cni-bin\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932261 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bc7906ed-7d0a-444b-8e14-12c67bc3301e-mcd-auth-proxy-config\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932288 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-var-lib-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932305 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-system-cni-dir\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932322 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-os-release\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932337 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-k8s-cni-cncf-io\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932353 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-cni-multus\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932355 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-env-overrides\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932374 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" 
(UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-node-log\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932390 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/885c296a-449b-4977-b6d9-396bc84d3cfa-ovn-node-metrics-cert\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932406 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-script-lib\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932406 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-etc-kubernetes\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932429 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-cnibin\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932440 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932466 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-etc-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932503 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932038 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-log-socket\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932539 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-netd\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932573 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/61ac6c2e-df95-49c5-a959-0e061e9c5909-cni-binary-copy\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932608 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-system-cni-dir\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932620 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-slash\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932680 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/61ac6c2e-df95-49c5-a959-0e061e9c5909-multus-daemon-config\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-os-release\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932679 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-bin\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932722 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-system-cni-dir\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932758 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-ovn\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932777 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-cni-bin\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932824 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-ovn-kubernetes\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932854 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-node-log\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932877 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-os-release\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932886 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-run-k8s-cni-cncf-io\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932919 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/61ac6c2e-df95-49c5-a959-0e061e9c5909-host-var-lib-cni-multus\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.932970 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-var-lib-openvswitch\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.933195 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/80d62d26-c895-4b0b-a5b7-252147ad2dc9-cni-binary-copy\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.933331 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bc7906ed-7d0a-444b-8e14-12c67bc3301e-mcd-auth-proxy-config\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.933441 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-systemd\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.933597 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-script-lib\") 
pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.935281 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.940694 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/885c296a-449b-4977-b6d9-396bc84d3cfa-ovn-node-metrics-cert\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.947897 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bc7906ed-7d0a-444b-8e14-12c67bc3301e-proxy-tls\") pod \"machine-config-daemon-2hd85\" (UID: 
\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.949147 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5p56\" (UniqueName: \"kubernetes.io/projected/80d62d26-c895-4b0b-a5b7-252147ad2dc9-kube-api-access-b5p56\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.953494 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwp8k\" (UniqueName: \"kubernetes.io/projected/885c296a-449b-4977-b6d9-396bc84d3cfa-kube-api-access-pwp8k\") pod \"ovnkube-node-64tlm\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.953717 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z54p\" (UniqueName: \"kubernetes.io/projected/bc7906ed-7d0a-444b-8e14-12c67bc3301e-kube-api-access-4z54p\") pod \"machine-config-daemon-2hd85\" (UID: \"bc7906ed-7d0a-444b-8e14-12c67bc3301e\") " pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.954624 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",
\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.955699 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvdbj\" (UniqueName: \"kubernetes.io/projected/61ac6c2e-df95-49c5-a959-0e061e9c5909-kube-api-access-hvdbj\") pod \"multus-4279f\" (UID: \"61ac6c2e-df95-49c5-a959-0e061e9c5909\") " pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.957545 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.963832 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-4279f" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.974588 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/80d62d26-c895-4b0b-a5b7-252147ad2dc9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qx2gg\" (UID: \"80d62d26-c895-4b0b-a5b7-252147ad2dc9\") " pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.980019 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.980431 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:36 crc kubenswrapper[4810]: I1203 05:41:36.988117 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" Dec 03 05:41:36 crc kubenswrapper[4810]: W1203 05:41:36.995472 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc7906ed_7d0a_444b_8e14_12c67bc3301e.slice/crio-836dda220211ce11e8dbf33db2dffbe47cdd87626f7f20fad70cc190520f99fe WatchSource:0}: Error finding container 836dda220211ce11e8dbf33db2dffbe47cdd87626f7f20fad70cc190520f99fe: Status 404 returned error can't find the container with id 836dda220211ce11e8dbf33db2dffbe47cdd87626f7f20fad70cc190520f99fe Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.003160 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: W1203 05:41:37.008878 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80d62d26_c895_4b0b_a5b7_252147ad2dc9.slice/crio-9169956dfd4526aec26d44fe3fe57813961814204f49ef80d00f520deb152adf WatchSource:0}: Error finding container 9169956dfd4526aec26d44fe3fe57813961814204f49ef80d00f520deb152adf: Status 404 returned error can't find the container with id 9169956dfd4526aec26d44fe3fe57813961814204f49ef80d00f520deb152adf Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.033803 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.034026 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:41:39.033989753 +0000 UTC m=+22.969450594 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.042189 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could 
not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.069078 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.093015 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.093247 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.118139 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.135530 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.136096 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.135903 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136207 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.136134 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136237 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:39.13619909 +0000 UTC m=+23.071659931 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136276 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:39.136253731 +0000 UTC m=+23.071714772 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.136303 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136413 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136432 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136442 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136449 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136463 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136481 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136483 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:39.136474387 +0000 UTC m=+23.071935228 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.136536 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-03 05:41:39.136528838 +0000 UTC m=+23.071989679 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.151473 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.177466 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.192031 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.197851 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.211346 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.214401 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.228331 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.233190 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.251633 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: 
I1203 05:41:37.277878 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.287803 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.327845 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.357962 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.367636 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.376281 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.376403 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.387936 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.407935 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.426813 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.463557 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.508184 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.510269 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.571900 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.594588 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.594652 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.594669 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"836dda220211ce11e8dbf33db2dffbe47cdd87626f7f20fad70cc190520f99fe"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.596436 4810 generic.go:334] "Generic (PLEG): container finished" podID="80d62d26-c895-4b0b-a5b7-252147ad2dc9" containerID="abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00" exitCode=0 Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.596478 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" event={"ID":"80d62d26-c895-4b0b-a5b7-252147ad2dc9","Type":"ContainerDied","Data":"abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.596755 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" event={"ID":"80d62d26-c895-4b0b-a5b7-252147ad2dc9","Type":"ContainerStarted","Data":"9169956dfd4526aec26d44fe3fe57813961814204f49ef80d00f520deb152adf"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.598110 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" 
containerID="5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c" exitCode=0 Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.598168 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.598197 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"c274353c504459a46f005a04fdba617dcd302b6629b8821a19b4f8906b78f53d"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.601176 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4279f" event={"ID":"61ac6c2e-df95-49c5-a959-0e061e9c5909","Type":"ContainerStarted","Data":"2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.601275 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4279f" event={"ID":"61ac6c2e-df95-49c5-a959-0e061e9c5909","Type":"ContainerStarted","Data":"02adb96afbcff3346591ad985b1a5897ed1f4c13beb70ab34e1f5373028ec246"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.627057 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.641551 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.654538 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.683923 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.723227 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.756280 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.768382 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.815892 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.847886 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.878533 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.881099 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.881141 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.881155 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.881300 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 
05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.883185 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.927505 4810 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.927904 4810 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.929314 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.929378 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.929401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.929430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.929453 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:37Z","lastTransitionTime":"2025-12-03T05:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.951029 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.954779 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.954955 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.954997 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.955037 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.955056 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.955068 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:37Z","lastTransitionTime":"2025-12-03T05:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.968706 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.971937 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.971991 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.972011 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.972036 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.972056 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:37Z","lastTransitionTime":"2025-12-03T05:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:37 crc kubenswrapper[4810]: E1203 05:41:37.988475 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.993163 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.993295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.993380 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.993464 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.993522 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:37Z","lastTransitionTime":"2025-12-03T05:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:37 crc kubenswrapper[4810]: I1203 05:41:37.996017 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:37Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: E1203 05:41:38.006442 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.010108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.010174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.010190 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.010216 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.010236 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: E1203 05:41:38.026395 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: E1203 05:41:38.026556 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.028853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.029004 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.029168 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.029280 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.029512 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.033097 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.076722 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",
\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.121119 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.131620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.131650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.131659 4810 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.131672 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.131681 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.159929 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runni
ng\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.197203 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.235477 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.235514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.235522 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.235541 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.235549 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.236994 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.275959 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.317677 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.339054 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.339099 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.339109 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.339126 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.339135 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.361988 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.377488 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.377493 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:38 crc kubenswrapper[4810]: E1203 05:41:38.377721 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:38 crc kubenswrapper[4810]: E1203 05:41:38.377864 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.394323 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.441355 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.442302 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.442396 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.442423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.442445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.442458 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.478465 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.516378 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.545206 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.545259 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.545271 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.545290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.545300 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.557534 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.598810 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.609286 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.609349 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.609363 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.611893 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.614406 4810 
generic.go:334] "Generic (PLEG): container finished" podID="80d62d26-c895-4b0b-a5b7-252147ad2dc9" containerID="f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e" exitCode=0 Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.614558 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" event={"ID":"80d62d26-c895-4b0b-a5b7-252147ad2dc9","Type":"ContainerDied","Data":"f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.642227 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.648879 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.648930 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.648945 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.648967 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.648982 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.677645 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.724834 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.751884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.751952 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.751969 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.751993 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.752007 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.769543 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.796123 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.840016 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.854818 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.854869 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.854878 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.854898 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.854946 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.880489 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.916946 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.958311 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.958509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.958558 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.958567 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.958585 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.958596 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:38Z","lastTransitionTime":"2025-12-03T05:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:38 crc kubenswrapper[4810]: I1203 05:41:38.996513 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:38Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.041920 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.055623 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.056127 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:41:43.056092852 +0000 UTC m=+26.991553703 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.061974 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.062023 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.062043 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.062071 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.062094 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.083569 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.125801 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.157359 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.157405 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.157441 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.157466 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " 
pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157482 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157549 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:43.15753338 +0000 UTC m=+27.092994221 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157567 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157581 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157593 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157625 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:43.157617032 +0000 UTC m=+27.093077873 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157689 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157706 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157780 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157798 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157816 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:43.157790756 +0000 UTC m=+27.093251677 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.157883 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:43.157854208 +0000 UTC m=+27.093315059 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.158189 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.164634 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.164664 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.164672 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.164687 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.164697 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.202252 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.238256 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.267537 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.267585 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.267600 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.267620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.267632 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.276762 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.323400 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.368218 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.370044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.370081 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.370090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.370108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.370119 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.376389 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:39 crc kubenswrapper[4810]: E1203 05:41:39.376530 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.394471 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.472886 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.472951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.472968 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.472988 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc 
kubenswrapper[4810]: I1203 05:41:39.473011 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.575631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.575685 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.575702 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.575724 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.575759 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.622491 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.622560 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.622575 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.635014 4810 generic.go:334] "Generic (PLEG): container finished" podID="80d62d26-c895-4b0b-a5b7-252147ad2dc9" containerID="d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e" exitCode=0 Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.636296 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" event={"ID":"80d62d26-c895-4b0b-a5b7-252147ad2dc9","Type":"ContainerDied","Data":"d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.663072 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.679157 4810 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.679200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.679212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.679229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.679240 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.687207 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z 
is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.703860 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.718763 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.732363 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.752383 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.766458 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.782275 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.782424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.782453 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.782464 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.782484 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.782498 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.799597 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.816551 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.850554 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.883953 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers 
with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.887982 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.888077 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.888092 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.888447 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.888467 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.917852 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.957756 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.991727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.991785 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.991797 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.991817 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.991832 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:39Z","lastTransitionTime":"2025-12-03T05:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:39 crc kubenswrapper[4810]: I1203 05:41:39.998225 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.095676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.095789 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.095812 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.095842 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.095863 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.199586 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.199666 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.199710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.199766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.199793 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.304224 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.304308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.304335 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.304364 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.304385 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.377122 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.377256 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:40 crc kubenswrapper[4810]: E1203 05:41:40.377367 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:40 crc kubenswrapper[4810]: E1203 05:41:40.377492 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.408298 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.408373 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.408385 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.408412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.408433 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.511844 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.511920 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.511939 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.511966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.511986 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.615038 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.615131 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.615157 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.615196 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.615229 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.645225 4810 generic.go:334] "Generic (PLEG): container finished" podID="80d62d26-c895-4b0b-a5b7-252147ad2dc9" containerID="1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3" exitCode=0 Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.645299 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" event={"ID":"80d62d26-c895-4b0b-a5b7-252147ad2dc9","Type":"ContainerDied","Data":"1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.666470 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.681062 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.703186 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.724938 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.730344 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.730408 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.730428 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.730469 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.730488 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.764265 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.786600 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.806807 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.825552 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.832815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.832858 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.832873 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.832895 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.832910 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.852099 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9
388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.866000 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.878165 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.892523 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.906307 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.934906 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.935758 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.935783 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.935791 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.935808 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.935819 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:40Z","lastTransitionTime":"2025-12-03T05:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:40 crc kubenswrapper[4810]: I1203 05:41:40.946614 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:40Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.039723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.039803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.039820 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.039842 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.039858 4810 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.142869 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.143198 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.143208 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.143223 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.143235 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.245157 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.245199 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.245210 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.245229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.245243 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.347825 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.347883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.347895 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.347917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.347930 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.377410 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:41 crc kubenswrapper[4810]: E1203 05:41:41.377564 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.450867 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.450910 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.450920 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.450936 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.450948 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.553435 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.553486 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.553494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.553509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.553518 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.656534 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.656575 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.656587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.656605 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.656618 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.660368 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.664042 4810 generic.go:334] "Generic (PLEG): container finished" podID="80d62d26-c895-4b0b-a5b7-252147ad2dc9" containerID="ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e" exitCode=0 Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.664078 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" event={"ID":"80d62d26-c895-4b0b-a5b7-252147ad2dc9","Type":"ContainerDied","Data":"ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.684926 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.712951 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.731812 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.748685 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.759836 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.759884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.759905 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.759931 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.760010 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.761723 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.789158 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.809391 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.822957 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.836531 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.851920 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.865108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.865188 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.865214 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.865249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.865275 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.870475 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.893556 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.910312 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.923718 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.947183 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:41Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.969136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.969180 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.969189 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.969206 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:41 crc kubenswrapper[4810]: I1203 05:41:41.969216 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:41Z","lastTransitionTime":"2025-12-03T05:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.073473 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.073528 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.073543 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.073563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.073579 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.176837 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.176896 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.176912 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.176938 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.176952 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.279967 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.280030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.280048 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.280075 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.280095 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.376577 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.376964 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:42 crc kubenswrapper[4810]: E1203 05:41:42.377247 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:42 crc kubenswrapper[4810]: E1203 05:41:42.377504 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.385847 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.385907 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.385926 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.385949 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.385966 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.489770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.490410 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.490563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.490717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.490985 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.595216 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.595706 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.595902 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.596103 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.596250 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.675599 4810 generic.go:334] "Generic (PLEG): container finished" podID="80d62d26-c895-4b0b-a5b7-252147ad2dc9" containerID="70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee" exitCode=0 Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.675780 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" event={"ID":"80d62d26-c895-4b0b-a5b7-252147ad2dc9","Type":"ContainerDied","Data":"70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.701642 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.702408 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.702442 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.702481 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.702506 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.716949 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.741517 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.768052 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.796236 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e6
06be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.807151 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.807211 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.807225 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.807247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.807263 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.819342 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.844818 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.867891 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.890633 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.905027 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.918174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.918240 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.918254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.918278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.918292 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:42Z","lastTransitionTime":"2025-12-03T05:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.930494 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.949802 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.966364 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:42 crc kubenswrapper[4810]: I1203 05:41:42.985746 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.000089 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:42Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.021443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.021498 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.021511 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.021536 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.021550 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.023863 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9
388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.121582 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.122032 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:41:51.121975035 +0000 UTC m=+35.057435916 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.124003 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.124061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.124079 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.124105 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.124124 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.222920 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.223015 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.223062 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.223101 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223203 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223225 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223239 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223243 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223292 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:51.22327793 +0000 UTC m=+35.158738771 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223363 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:51.223335131 +0000 UTC m=+35.158796002 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223407 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223462 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223479 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223502 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223553 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:51.223538436 +0000 UTC m=+35.158999307 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.223580 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:51.223568707 +0000 UTC m=+35.159029578 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.226466 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.226539 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.226558 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.226622 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.226642 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.330140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.330221 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.330244 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.330274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.330293 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.376493 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:43 crc kubenswrapper[4810]: E1203 05:41:43.376713 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.434084 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.434324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.434413 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.434511 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.434601 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.538911 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.538969 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.538984 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.539005 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.539040 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.642111 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.642184 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.642201 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.642220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.642234 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.684222 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" event={"ID":"80d62d26-c895-4b0b-a5b7-252147ad2dc9","Type":"ContainerStarted","Data":"7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.695623 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.696161 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.696300 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.696545 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.707453 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.727008 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.734812 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.736974 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.744206 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.744664 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.744704 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.744720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.744766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.744780 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.754782 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.765021 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.782410 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cr
i-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.797409 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.808969 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.820694 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.834793 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.847273 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.847325 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.847335 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.847354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.847365 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.855228 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:
41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.874614 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.892326 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.909130 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.921780 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.935383 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.948860 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.951585 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.951617 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.951626 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.951646 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.951661 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:43Z","lastTransitionTime":"2025-12-03T05:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.962085 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:43 crc kubenswrapper[4810]: I1203 05:41:43.977872 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:43Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.004114 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.035863 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.050840 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.055823 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.055936 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.055957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.055985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.056009 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.074672 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.096592 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.116496 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.131872 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.147725 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.160147 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.160206 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.160220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.160247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.160264 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.167613 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.185090 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.202199 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:44Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.263918 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.263992 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.264014 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.264053 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.264076 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.368596 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.368666 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.368683 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.368711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.368758 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.376395 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.376487 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:44 crc kubenswrapper[4810]: E1203 05:41:44.376579 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:44 crc kubenswrapper[4810]: E1203 05:41:44.376769 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.471932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.472023 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.472040 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.472066 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.472084 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.575370 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.575409 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.575417 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.575433 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.575463 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.678867 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.678956 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.678976 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.679007 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.679027 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.782542 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.782598 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.782611 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.782635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.782648 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.885294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.885334 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.885346 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.885367 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.885379 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.987274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.987314 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.987325 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.987348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:44 crc kubenswrapper[4810]: I1203 05:41:44.987359 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:44Z","lastTransitionTime":"2025-12-03T05:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.089689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.089752 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.089760 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.089775 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.089785 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.218333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.218380 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.218392 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.218414 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.218426 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.321099 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.321152 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.321161 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.321178 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.321189 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.376976 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:45 crc kubenswrapper[4810]: E1203 05:41:45.377129 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.423908 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.423973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.424011 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.424042 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.424059 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.527306 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.527354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.527368 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.527393 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.527407 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.629979 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.630032 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.630047 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.630070 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.630086 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.732639 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.732689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.732700 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.732720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.732743 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.836067 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.836121 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.836136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.836156 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.836171 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.939440 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.939517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.939538 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.939570 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:45 crc kubenswrapper[4810]: I1203 05:41:45.939592 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:45Z","lastTransitionTime":"2025-12-03T05:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.043556 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.043652 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.043674 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.043713 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.043773 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.146761 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.146828 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.146853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.146882 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.146926 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.251092 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.251205 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.251235 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.251274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.251304 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.354791 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.354862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.354883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.354911 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.354934 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.377409 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.377487 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:46 crc kubenswrapper[4810]: E1203 05:41:46.377642 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:46 crc kubenswrapper[4810]: E1203 05:41:46.377846 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.412004 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24da271e73297ff91d8d88e9d78b88cdac11cf46
f54e43839c9b8c2f277f9d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.432466 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.455812 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.457646 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.457720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.457776 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.457812 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.457832 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.480370 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.504900 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.538285 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: E1203 05:41:46.546809 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod885c296a_449b_4977_b6d9_396bc84d3cfa.slice/crio-conmon-24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c.scope\": RecentStats: unable to find data in memory cache]" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.591083 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.594950 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.595036 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.595062 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.595097 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.595125 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.610292 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.632624 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.659421 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.685676 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.699236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.699329 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.699350 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.699379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.699400 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.706679 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.713169 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/0.log" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.718575 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c" exitCode=1 Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.718641 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.719886 4810 scope.go:117] "RemoveContainer" containerID="24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.731434 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.756495 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.776287 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.800644 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift
-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.802672 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.802720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.803179 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.803260 4810 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.803359 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.836933 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24da271e73297ff91d8d88e9d78b88cdac11cf46
f54e43839c9b8c2f277f9d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:46Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:41:45.903305 6165 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:41:45.903396 6165 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 05:41:45.903419 6165 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 05:41:45.903460 6165 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 05:41:45.903504 6165 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:41:45.903500 6165 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:41:45.903519 6165 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:41:45.903549 6165 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 05:41:45.903573 6165 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 05:41:45.903577 6165 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 05:41:45.903610 6165 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:41:45.903617 6165 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 05:41:45.903643 6165 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 05:41:45.903698 6165 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 05:41:45.903713 6165 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.858967 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.877376 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.899632 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.911248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.911564 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.911832 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.911994 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.912152 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:46Z","lastTransitionTime":"2025-12-03T05:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.920256 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.958903 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.978365 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:46 crc kubenswrapper[4810]: I1203 05:41:46.998138 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.018394 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.018426 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.018437 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.018458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.018472 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.020384 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.038597 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.055159 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-o
perator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.076066 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.091595 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.106604 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.121546 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.121606 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.121618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.121640 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.121655 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.225152 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.225519 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.225529 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.225547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.225561 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.328862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.328928 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.328948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.328976 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.328997 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.376377 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:47 crc kubenswrapper[4810]: E1203 05:41:47.376566 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.432039 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.432096 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.432111 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.432132 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.432150 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.535228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.535267 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.535277 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.535294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.535308 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.637663 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.637763 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.637782 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.637812 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.637837 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.726402 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/0.log" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.729786 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.730266 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.741430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.741626 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.741650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.741686 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.741715 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.751755 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.769192 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.785065 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.817563 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:46Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:41:45.903305 6165 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:41:45.903396 6165 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 05:41:45.903419 6165 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 05:41:45.903460 6165 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 05:41:45.903504 6165 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:41:45.903500 6165 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:41:45.903519 6165 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:41:45.903549 6165 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 05:41:45.903573 6165 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 05:41:45.903577 6165 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 05:41:45.903610 6165 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:41:45.903617 6165 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 05:41:45.903643 6165 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 05:41:45.903698 6165 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 05:41:45.903713 6165 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.841815 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.844941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.844994 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.845015 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.845043 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.845064 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.872782 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/e
tcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":
\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.884635 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.902350 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.916649 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.935841 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.948870 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.948929 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.948948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.948973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.948988 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:47Z","lastTransitionTime":"2025-12-03T05:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.953414 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.970225 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:47 crc kubenswrapper[4810]: I1203 05:41:47.993543 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03
T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:47Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.016238 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8
f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.034663 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.052130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.052162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.052171 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.052190 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.052208 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.155567 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.155631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.155650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.155679 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.155698 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.259605 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.259680 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.259704 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.259778 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.259802 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.363386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.363521 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.363540 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.363569 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.363589 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.377001 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.377175 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.377916 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.378070 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.410215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.410265 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.410282 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.410307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.410325 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.433764 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.440055 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.440122 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.440140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.440167 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.440186 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.463855 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.470689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.470853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.470917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.470949 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.471009 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.493534 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.500377 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.500450 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.500470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.500501 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.500521 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.523861 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.530609 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.530693 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.530714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.530786 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.530805 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.552561 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.552837 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.555879 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.555935 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.555958 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.555987 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.556008 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.660534 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.660587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.660604 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.660629 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.660648 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.737884 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/1.log" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.738684 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/0.log" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.742921 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d" exitCode=1 Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.742972 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.743016 4810 scope.go:117] "RemoveContainer" containerID="24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.743833 4810 scope.go:117] "RemoveContainer" containerID="ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d" Dec 03 05:41:48 crc kubenswrapper[4810]: E1203 05:41:48.744051 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.763597 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.763646 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.763656 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.763676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.763689 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.790697 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.805807 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.832897 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:46Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:41:45.903305 6165 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:41:45.903396 6165 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 05:41:45.903419 6165 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 05:41:45.903460 6165 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 05:41:45.903504 6165 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:41:45.903500 6165 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:41:45.903519 6165 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:41:45.903549 6165 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 05:41:45.903573 6165 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 05:41:45.903577 6165 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 05:41:45.903610 6165 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:41:45.903617 6165 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 05:41:45.903643 6165 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 05:41:45.903698 6165 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 05:41:45.903713 6165 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: 
UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23
c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.857054 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.867121 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.867278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.867360 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.867447 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.867533 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.871608 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.887074 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.915987 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cr
i-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.932787 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.952398 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.966982 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.970842 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.970925 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.970944 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.970965 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.970979 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:48Z","lastTransitionTime":"2025-12-03T05:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:48 crc kubenswrapper[4810]: I1203 05:41:48.985971 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:48Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.010530 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.031117 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-o
perator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.053254 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.072008 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.074095 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.074142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.074156 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.074180 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.074197 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.121077 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz"] Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.122101 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.126914 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.127241 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.147296 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.167163 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.177002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc 
kubenswrapper[4810]: I1203 05:41:49.177053 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.177065 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.177090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.177103 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.190629 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.212811 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.224575 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f2c68ef2-27e6-4d66-8520-c795a1dcc811-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.224896 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f2c68ef2-27e6-4d66-8520-c795a1dcc811-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.225184 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f2c68ef2-27e6-4d66-8520-c795a1dcc811-env-overrides\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 
05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.225430 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vd55\" (UniqueName: \"kubernetes.io/projected/f2c68ef2-27e6-4d66-8520-c795a1dcc811-kube-api-access-2vd55\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.231704 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.249950 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.291569 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.291944 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.292040 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.292542 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb
66929c0f0de79089334a119d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24da271e73297ff91d8d88e9d78b88cdac11cf46f54e43839c9b8c2f277f9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:46Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:41:45.903305 6165 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:41:45.903396 6165 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 05:41:45.903419 6165 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 05:41:45.903460 6165 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 05:41:45.903504 6165 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:41:45.903500 6165 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:41:45.903519 6165 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:41:45.903549 6165 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 05:41:45.903573 6165 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 05:41:45.903577 6165 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 05:41:45.903610 6165 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:41:45.903617 6165 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 05:41:45.903643 6165 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 05:41:45.903698 6165 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 05:41:45.903713 6165 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.292255 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.293183 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.325425 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.326455 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vd55\" (UniqueName: \"kubernetes.io/projected/f2c68ef2-27e6-4d66-8520-c795a1dcc811-kube-api-access-2vd55\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.326601 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f2c68ef2-27e6-4d66-8520-c795a1dcc811-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.326692 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f2c68ef2-27e6-4d66-8520-c795a1dcc811-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.326783 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f2c68ef2-27e6-4d66-8520-c795a1dcc811-env-overrides\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.327460 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f2c68ef2-27e6-4d66-8520-c795a1dcc811-env-overrides\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.327564 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f2c68ef2-27e6-4d66-8520-c795a1dcc811-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.335844 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f2c68ef2-27e6-4d66-8520-c795a1dcc811-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.347133 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vd55\" (UniqueName: \"kubernetes.io/projected/f2c68ef2-27e6-4d66-8520-c795a1dcc811-kube-api-access-2vd55\") pod \"ovnkube-control-plane-749d76644c-kwhpz\" (UID: \"f2c68ef2-27e6-4d66-8520-c795a1dcc811\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.347802 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.373411 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.377901 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:49 crc kubenswrapper[4810]: E1203 05:41:49.378235 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.396957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.397209 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.397266 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.397461 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.397571 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.396626 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.419397 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.440568 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.440564 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.461133 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: W1203 05:41:49.477279 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2c68ef2_27e6_4d66_8520_c795a1dcc811.slice/crio-ea71854e1347a7b2364d6ed0cb57b97e9035bc0b4543fccff167864f539bc792 WatchSource:0}: Error finding container ea71854e1347a7b2364d6ed0cb57b97e9035bc0b4543fccff167864f539bc792: Status 404 returned error can't find the container with id 
ea71854e1347a7b2364d6ed0cb57b97e9035bc0b4543fccff167864f539bc792 Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.477581 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.490847 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.501495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.501552 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.501570 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.501600 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.501621 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.605832 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.605912 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.605932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.605962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.605981 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.709534 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.709604 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.709622 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.709652 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.709673 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.750597 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/1.log" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.757001 4810 scope.go:117] "RemoveContainer" containerID="ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d" Dec 03 05:41:49 crc kubenswrapper[4810]: E1203 05:41:49.757379 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.759154 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" event={"ID":"f2c68ef2-27e6-4d66-8520-c795a1dcc811","Type":"ContainerStarted","Data":"ea71854e1347a7b2364d6ed0cb57b97e9035bc0b4543fccff167864f539bc792"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.776542 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.796887 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.814685 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.815752 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.815808 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.815824 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.815849 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.815867 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.830231 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.849460 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.871924 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.894648 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.917699 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.918722 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.918840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.918856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.918879 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.918897 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:49Z","lastTransitionTime":"2025-12-03T05:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.938686 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.958026 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:49 crc kubenswrapper[4810]: I1203 05:41:49.980876 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:49Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.002708 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.018970 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.021475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.021515 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.021528 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.021551 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.021566 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.041748 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.066997 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.088880 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.124852 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.124945 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.124972 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.125013 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.125044 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.227818 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.227880 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.227894 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.227910 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.227922 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.331268 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.331665 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.331785 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.331887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.332008 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.377135 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:50 crc kubenswrapper[4810]: E1203 05:41:50.377289 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.377362 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:50 crc kubenswrapper[4810]: E1203 05:41:50.377534 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.434709 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.434756 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.434764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.434778 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.434787 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.538532 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.538602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.538621 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.538642 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.538655 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.642756 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.642819 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.642836 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.642856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.642871 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.656127 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-zx6mp"] Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.657162 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:50 crc kubenswrapper[4810]: E1203 05:41:50.657351 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.681568 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.703367 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.723115 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.741201 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.743923 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kwjk\" (UniqueName: \"kubernetes.io/projected/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-kube-api-access-2kwjk\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.744056 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.747681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.747705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.747714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.747774 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.747785 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.769414 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" event={"ID":"f2c68ef2-27e6-4d66-8520-c795a1dcc811","Type":"ContainerStarted","Data":"2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.769803 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" event={"ID":"f2c68ef2-27e6-4d66-8520-c795a1dcc811","Type":"ContainerStarted","Data":"f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.776576 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb
66929c0f0de79089334a119d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.798043 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.830950 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.845276 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.845349 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kwjk\" (UniqueName: \"kubernetes.io/projected/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-kube-api-access-2kwjk\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:50 crc kubenswrapper[4810]: E1203 05:41:50.845526 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:50 crc kubenswrapper[4810]: E1203 05:41:50.845629 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs podName:7283fe50-3c8e-4b8b-90ac-80e0e2c9a746 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:51.345600103 +0000 UTC m=+35.281060984 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs") pod "network-metrics-daemon-zx6mp" (UID: "7283fe50-3c8e-4b8b-90ac-80e0e2c9a746") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.845915 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.852375 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.852423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.852433 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 
05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.852450 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.852463 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.869352 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kwjk\" (UniqueName: \"kubernetes.io/projected/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-kube-api-access-2kwjk\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.873352 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db770
8c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.891625 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.917385 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.936441 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.952667 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.955615 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.955689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.955708 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.955781 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.955801 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:50Z","lastTransitionTime":"2025-12-03T05:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.969973 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:50 crc kubenswrapper[4810]: I1203 05:41:50.985051 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:50Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.006576 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.023300 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.044095 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.059790 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.059843 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.059896 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.059921 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.059938 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.069023 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.092008 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.114526 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.133281 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.150248 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.150584 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:42:07.150526372 +0000 UTC m=+51.085987243 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.160311 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/k
ubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during 
admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-
overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.162951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.163019 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.163035 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.163059 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.163075 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.182014 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.213517 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.228666 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.244777 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.251760 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.251838 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.251879 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:51 
crc kubenswrapper[4810]: I1203 05:41:51.251939 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252136 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252230 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:07.252201846 +0000 UTC m=+51.187662727 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252370 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252446 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252537 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252564 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252398 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252687 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252712 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252657 4810 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:07.252625616 +0000 UTC m=+51.188086497 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252886 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:07.25281745 +0000 UTC m=+51.188278331 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.252974 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:07.252921733 +0000 UTC m=+51.188382604 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.266856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.266922 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.266944 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.266974 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.266994 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.267700 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.289803 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.311114 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.328609 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.344075 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.353665 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.353955 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.354107 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs podName:7283fe50-3c8e-4b8b-90ac-80e0e2c9a746 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:52.354075473 +0000 UTC m=+36.289536344 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs") pod "network-metrics-daemon-zx6mp" (UID: "7283fe50-3c8e-4b8b-90ac-80e0e2c9a746") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.366054 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.370711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.370820 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.370848 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.370879 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.370900 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.377334 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:51 crc kubenswrapper[4810]: E1203 05:41:51.377578 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.391988 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"container
ID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:51Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 
05:41:51.474553 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.474613 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.474635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.474665 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.474690 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.578136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.578220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.578238 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.578264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.578284 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.681636 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.681714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.681777 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.681807 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.681827 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.786466 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.786823 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.787049 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.787261 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.787572 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.891263 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.891322 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.891345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.891375 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.891400 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.993710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.993783 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.993799 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.993813 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:51 crc kubenswrapper[4810]: I1203 05:41:51.993824 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:51Z","lastTransitionTime":"2025-12-03T05:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.097348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.097406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.097422 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.097446 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.097463 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.186589 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.200662 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.200710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.200721 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.200757 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.200771 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.202578 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.215569 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.229833 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.244079 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.255006 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.275840 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb
66929c0f0de79089334a119d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.288274 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.302764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.302796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.302804 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.302818 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.302828 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.311824 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.324680 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.339496 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.352430 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.364791 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:52 crc kubenswrapper[4810]: E1203 05:41:52.364980 4810 secret.go:188] Couldn't get secret 
openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:52 crc kubenswrapper[4810]: E1203 05:41:52.365086 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs podName:7283fe50-3c8e-4b8b-90ac-80e0e2c9a746 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:54.365062091 +0000 UTC m=+38.300522932 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs") pod "network-metrics-daemon-zx6mp" (UID: "7283fe50-3c8e-4b8b-90ac-80e0e2c9a746") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.367848 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c996271
81c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.376975 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.377092 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.377097 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:52 crc kubenswrapper[4810]: E1203 05:41:52.377310 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:52 crc kubenswrapper[4810]: E1203 05:41:52.377442 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:52 crc kubenswrapper[4810]: E1203 05:41:52.377531 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.389676 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.403068 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.405326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.405369 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.405382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.405399 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.405425 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.417191 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.429847 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.442352 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:52Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.508107 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.508167 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.508179 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.508201 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.508218 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.611326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.611387 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.611405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.611427 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.611442 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.714581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.714643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.714654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.714676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.714691 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.818655 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.818721 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.818754 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.818771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.818781 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.923063 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.923130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.923149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.923175 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:52 crc kubenswrapper[4810]: I1203 05:41:52.923199 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:52Z","lastTransitionTime":"2025-12-03T05:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.026840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.026945 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.026966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.027041 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.027073 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.130406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.130475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.130494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.130522 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.130540 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.233318 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.233392 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.233404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.233426 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.233440 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.336553 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.336595 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.336606 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.336655 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.336668 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.377157 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:53 crc kubenswrapper[4810]: E1203 05:41:53.377365 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.440642 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.440718 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.440787 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.440826 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.440850 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.544582 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.544709 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.544764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.544809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.544831 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.648757 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.648826 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.648850 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.648878 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.648898 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.752066 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.752145 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.752168 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.752201 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.752223 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.855061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.855451 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.855651 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.855834 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.855958 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.960123 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.960196 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.960217 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.960244 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:53 crc kubenswrapper[4810]: I1203 05:41:53.960262 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:53Z","lastTransitionTime":"2025-12-03T05:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.063629 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.063772 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.063796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.063826 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.063847 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.167129 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.167197 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.167215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.167242 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.167263 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.270564 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.270640 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.270658 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.270687 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.270708 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.374891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.374955 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.374970 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.374989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.375004 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.377385 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.377403 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.377589 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:54 crc kubenswrapper[4810]: E1203 05:41:54.377843 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:41:54 crc kubenswrapper[4810]: E1203 05:41:54.378020 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:54 crc kubenswrapper[4810]: E1203 05:41:54.378135 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.397966 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:54 crc kubenswrapper[4810]: E1203 05:41:54.398304 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:54 crc kubenswrapper[4810]: E1203 05:41:54.398471 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs podName:7283fe50-3c8e-4b8b-90ac-80e0e2c9a746 nodeName:}" failed. No retries permitted until 2025-12-03 05:41:58.398428684 +0000 UTC m=+42.333889715 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs") pod "network-metrics-daemon-zx6mp" (UID: "7283fe50-3c8e-4b8b-90ac-80e0e2c9a746") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.478803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.478852 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.478864 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.478883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.478895 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.582330 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.582377 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.582389 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.582406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.582416 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.685357 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.685402 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.685413 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.685432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.685444 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.787820 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.788117 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.788321 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.788573 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.788808 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.891903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.892284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.892418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.892611 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.892804 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.996651 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.996772 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.996821 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.996857 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:54 crc kubenswrapper[4810]: I1203 05:41:54.996878 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:54Z","lastTransitionTime":"2025-12-03T05:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.100378 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.100443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.100459 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.100480 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.100494 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.203493 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.203548 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.203562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.203584 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.203596 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.306468 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.306527 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.306547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.306573 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.306595 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.377310 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:55 crc kubenswrapper[4810]: E1203 05:41:55.377555 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.410943 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.411020 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.411048 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.411082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.411104 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.513979 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.514054 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.514077 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.514134 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.514153 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.616889 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.617208 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.617424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.617932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.618100 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.721008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.721076 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.721093 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.721118 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.721136 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.824818 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.825143 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.825320 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.825494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.825633 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.932449 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.932536 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.932555 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.932587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:55 crc kubenswrapper[4810]: I1203 05:41:55.932620 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:55Z","lastTransitionTime":"2025-12-03T05:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.035453 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.035723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.036093 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.036438 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.036790 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.140579 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.141509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.141701 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.141909 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.142097 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.245229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.245325 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.245351 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.245386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.245411 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.349296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.349381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.349406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.349443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.349460 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.377319 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.377339 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.377377 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:56 crc kubenswrapper[4810]: E1203 05:41:56.378387 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:41:56 crc kubenswrapper[4810]: E1203 05:41:56.377813 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:56 crc kubenswrapper[4810]: E1203 05:41:56.378150 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.405537 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"s
tartedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f034181019142
6785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.423199 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.444542 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.451618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.451727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.451788 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.451817 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.451839 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.467863 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.487406 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.503504 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.521413 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.537298 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.548590 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.555009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc 
kubenswrapper[4810]: I1203 05:41:56.555061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.555076 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.555099 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.555117 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.562232 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.579514 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.595029 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.608258 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.629191 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.653188 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb
66929c0f0de79089334a119d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.657980 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.658020 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.658036 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.658062 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.658081 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.670169 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.690035 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:56Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.761865 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.761935 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.761954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.761989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.762010 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.865097 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.865164 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.865187 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.865219 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.865242 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.968720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.968769 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.968778 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.968792 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:56 crc kubenswrapper[4810]: I1203 05:41:56.968800 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:56Z","lastTransitionTime":"2025-12-03T05:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.072580 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.072715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.072908 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.073026 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.073105 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.177035 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.177146 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.177202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.177229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.177248 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.281404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.281474 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.281491 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.281518 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.281537 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.376989 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:57 crc kubenswrapper[4810]: E1203 05:41:57.377337 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.384597 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.384632 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.384647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.384663 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.384676 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.488015 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.488098 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.488120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.488142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.488155 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.591569 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.591639 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.591658 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.591692 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.591716 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.694772 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.694886 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.694907 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.694937 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.694960 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.798142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.798215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.798234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.798263 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.798288 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.901311 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.901368 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.901380 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.901401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:57 crc kubenswrapper[4810]: I1203 05:41:57.901414 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:57Z","lastTransitionTime":"2025-12-03T05:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.004555 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.004614 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.004628 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.004650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.004664 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.108089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.108140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.108149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.108169 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.108180 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.211429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.211521 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.211545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.211574 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.211594 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.315123 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.315191 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.315204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.315226 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.315239 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.377456 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.377502 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.377591 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.377664 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.377789 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.377962 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.418792 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.418866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.418884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.418913 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.418934 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.448001 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.448281 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.448411 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs podName:7283fe50-3c8e-4b8b-90ac-80e0e2c9a746 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:06.44837418 +0000 UTC m=+50.383835051 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs") pod "network-metrics-daemon-zx6mp" (UID: "7283fe50-3c8e-4b8b-90ac-80e0e2c9a746") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.521740 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.521795 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.521809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.521827 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.521838 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.624639 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.624693 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.624707 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.624726 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.624763 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.634630 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.634686 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.634700 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.634725 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.634766 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.655596 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:58Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.661130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.661200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.661220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.661252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.661274 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.683129 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:58Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.688288 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.688373 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.688393 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.688422 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.688443 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.709901 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:58Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.716432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.716702 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.716890 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.717078 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.717264 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.737085 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:58Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.742795 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.742838 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.742852 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.742877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.742895 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.759810 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:41:58Z is after 2025-08-24T17:21:41Z" Dec 03 05:41:58 crc kubenswrapper[4810]: E1203 05:41:58.760174 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.762831 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.762878 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.762902 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.762935 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.762960 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.866260 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.866326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.866343 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.866370 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.866387 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.970932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.970989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.971002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.971028 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:58 crc kubenswrapper[4810]: I1203 05:41:58.971046 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:58Z","lastTransitionTime":"2025-12-03T05:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.075224 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.075274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.075285 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.075302 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.075313 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.178630 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.178696 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.178719 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.178776 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.178798 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.282673 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.282775 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.282800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.282830 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.282849 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.377224 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:41:59 crc kubenswrapper[4810]: E1203 05:41:59.377381 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.386264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.386572 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.386754 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.386963 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.387176 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.491403 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.491466 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.491484 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.491514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.491536 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.593811 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.594069 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.594144 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.594213 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.594287 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.696481 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.696565 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.696578 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.696600 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.696613 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.799274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.799328 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.799340 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.799358 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.799371 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.902207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.902277 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.902296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.902323 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:41:59 crc kubenswrapper[4810]: I1203 05:41:59.902343 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:41:59Z","lastTransitionTime":"2025-12-03T05:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.005321 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.005424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.005445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.005476 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.005495 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.109095 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.109547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.109829 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.110079 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.110307 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.214195 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.214574 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.214723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.214962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.215155 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.317983 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.318079 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.318097 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.318126 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.318148 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.376850 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.376921 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.376975 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:00 crc kubenswrapper[4810]: E1203 05:42:00.377108 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:00 crc kubenswrapper[4810]: E1203 05:42:00.377210 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:00 crc kubenswrapper[4810]: E1203 05:42:00.377510 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.421145 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.421207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.421227 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.421250 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.421271 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.524655 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.524698 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.524720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.524787 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.524815 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.627781 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.627873 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.627898 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.627936 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.627955 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.731634 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.731700 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.731721 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.731781 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.731801 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.834302 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.834358 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.834368 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.834386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.834397 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.937294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.937361 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.937383 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.937412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:00 crc kubenswrapper[4810]: I1203 05:42:00.937433 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:00Z","lastTransitionTime":"2025-12-03T05:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.040278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.040321 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.040336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.040353 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.040365 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.143446 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.143483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.143491 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.143506 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.143517 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.247045 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.247118 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.247136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.247164 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.247182 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.350770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.350854 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.350874 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.350909 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.350930 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.377173 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:01 crc kubenswrapper[4810]: E1203 05:42:01.377395 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.454017 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.454113 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.454172 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.454201 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.454318 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.557451 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.557516 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.557535 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.557595 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.557612 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.661584 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.661705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.661727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.661866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.661891 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.764796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.764857 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.764875 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.764903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.764923 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.867954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.868009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.868024 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.868046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.868063 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.971305 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.971379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.971397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.971425 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:01 crc kubenswrapper[4810]: I1203 05:42:01.971444 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:01Z","lastTransitionTime":"2025-12-03T05:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.075443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.076013 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.076231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.076442 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.076837 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.180557 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.180607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.180625 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.180650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.180667 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.284070 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.284122 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.284132 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.284151 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.284165 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.376779 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.376865 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:02 crc kubenswrapper[4810]: E1203 05:42:02.376958 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:02 crc kubenswrapper[4810]: E1203 05:42:02.377130 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.377935 4810 scope.go:117] "RemoveContainer" containerID="ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.378049 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:02 crc kubenswrapper[4810]: E1203 05:42:02.378255 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.391251 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.391331 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.391356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.391386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.391409 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.493714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.493866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.493962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.494057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.494172 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.606415 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.606880 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.606901 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.606928 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.606945 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.710156 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.710237 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.710248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.710265 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.710275 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.812166 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.812225 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.812240 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.812263 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.812279 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.815773 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/1.log" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.819258 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.820077 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.841747 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb
731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.860546 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.876890 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.889790 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.902251 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.915094 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.915149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.915162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.915182 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.915194 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:02Z","lastTransitionTime":"2025-12-03T05:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.921407 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.932691 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 
05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.951594 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.965356 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.979347 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:02 crc kubenswrapper[4810]: I1203 05:42:02.993753 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:02Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.010492 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.017499 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.017552 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.017563 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.017581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.017594 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.027252 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.051792 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.074606 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.092038 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.102977 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.120088 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc 
kubenswrapper[4810]: I1203 05:42:03.120139 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.120148 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.120166 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.120178 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.222856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.223433 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.223610 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.223793 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.223960 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.327346 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.327400 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.327412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.327435 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.327449 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.376834 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:03 crc kubenswrapper[4810]: E1203 05:42:03.378958 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.430765 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.430825 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.430841 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.430866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.430882 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.534091 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.534164 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.534185 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.534211 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.534228 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.637232 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.637307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.637324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.637353 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.637371 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.740071 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.740153 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.740178 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.740211 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.740234 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.826162 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/2.log" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.827409 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/1.log" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.831501 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61" exitCode=1 Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.831582 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.831647 4810 scope.go:117] "RemoveContainer" containerID="ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.834227 4810 scope.go:117] "RemoveContainer" containerID="51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61" Dec 03 05:42:03 crc kubenswrapper[4810]: E1203 05:42:03.834531 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.843561 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.843609 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.843627 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.843654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.843671 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.858139 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.877784 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.893707 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.915750 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.939492 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.946646 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.946690 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.946703 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.946722 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.946767 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:03Z","lastTransitionTime":"2025-12-03T05:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.958242 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:03 crc kubenswrapper[4810]: I1203 05:42:03.980966 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:03Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.004600 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.027371 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.050628 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.050700 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.050719 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.050777 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.050797 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.051088 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.073726 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.110055 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf5c57f7b9820abd44fa1f80ba66a96147264fb66929c0f0de79089334a119d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:41:47Z\\\",\\\"message\\\":\\\"l\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711701 6282 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 05:41:47.711664 6282 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1203 05:41:47.710848 6282 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-4279f in node crc\\\\nF1203 05:41:47.712160 6282 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.137236 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.153962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.154025 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.154061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.154089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.154109 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.161691 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.182704 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.202083 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.257710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.257783 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.257795 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.257820 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.257840 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.265678 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.360726 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.360800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.360817 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.360840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.360855 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.377303 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.377491 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:04 crc kubenswrapper[4810]: E1203 05:42:04.377644 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.377702 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:04 crc kubenswrapper[4810]: E1203 05:42:04.377916 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:04 crc kubenswrapper[4810]: E1203 05:42:04.378261 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.463817 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.463894 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.463913 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.463942 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.463960 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.567154 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.567217 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.567271 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.567301 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.567320 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.670531 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.670614 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.670639 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.670669 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.670693 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.773333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.773390 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.773408 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.773434 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.773453 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.837879 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/2.log" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.846164 4810 scope.go:117] "RemoveContainer" containerID="51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61" Dec 03 05:42:04 crc kubenswrapper[4810]: E1203 05:42:04.846534 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.876988 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.877073 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.877100 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.877130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.877150 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.878020 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.893283 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.917992 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.943016 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.959027 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.980766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:04 crc 
kubenswrapper[4810]: I1203 05:42:04.980838 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.980862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.980892 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.980912 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:04Z","lastTransitionTime":"2025-12-03T05:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:04 crc kubenswrapper[4810]: I1203 05:42:04.985398 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:04Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.005275 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.027220 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.049535 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.068874 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6ceca
b750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.084831 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.084887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.084905 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.084927 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.084940 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.101873 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.124060 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.147491 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.169445 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.188495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.188538 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.188549 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.188566 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.188579 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.189529 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.226358 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.242529 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:05Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.292206 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.292281 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.292299 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.292329 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.292356 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.376978 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:05 crc kubenswrapper[4810]: E1203 05:42:05.377209 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.395610 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.395677 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.395764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.395803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.395830 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.498803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.498888 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.498911 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.498941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.498959 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.602674 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.602752 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.602764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.602788 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.602802 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.706241 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.706327 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.706351 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.706381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.706398 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.809861 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.809941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.809959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.809986 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.810008 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.913334 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.913418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.913436 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.913463 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:05 crc kubenswrapper[4810]: I1203 05:42:05.913482 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:05Z","lastTransitionTime":"2025-12-03T05:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.017047 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.017103 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.017121 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.017145 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.017163 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.121016 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.121089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.121109 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.121138 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.121158 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.224995 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.225061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.225077 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.225106 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.225125 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.329136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.329212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.329235 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.329264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.329290 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.377350 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.377396 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:06 crc kubenswrapper[4810]: E1203 05:42:06.377563 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.377692 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:06 crc kubenswrapper[4810]: E1203 05:42:06.377962 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:06 crc kubenswrapper[4810]: E1203 05:42:06.378080 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.405241 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.430580 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.432495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.432577 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.432596 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.432679 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.432700 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.455213 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.476483 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.509951 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.529360 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:06 crc kubenswrapper[4810]: E1203 05:42:06.529583 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:42:06 crc kubenswrapper[4810]: E1203 05:42:06.529707 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs podName:7283fe50-3c8e-4b8b-90ac-80e0e2c9a746 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:22.529674643 +0000 UTC m=+66.465135534 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs") pod "network-metrics-daemon-zx6mp" (UID: "7283fe50-3c8e-4b8b-90ac-80e0e2c9a746") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.532294 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.537771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.537837 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.537854 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.537883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.537924 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.561688 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.579680 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.603676 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"
quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.626466 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.640081 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.640144 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.640163 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.640190 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.640207 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.649947 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.670138 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.688170 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.712601 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.738930 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.744028 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.744128 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.744156 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.744193 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.744222 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.754682 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.772132 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:06Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.847419 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.847485 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.847504 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.847534 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.847556 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.951491 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.951611 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.951637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.951673 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:06 crc kubenswrapper[4810]: I1203 05:42:06.951701 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:06Z","lastTransitionTime":"2025-12-03T05:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.054989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.055259 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.055412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.055548 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.055669 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.159907 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.160007 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.160027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.160053 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.160070 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.238513 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.238675 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:42:39.238645938 +0000 UTC m=+83.174106819 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.263775 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.263819 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.263863 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.263886 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.263904 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.340274 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.340333 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.340365 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.340391 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.340595 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.340714 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.340805 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:39.3406954 +0000 UTC m=+83.276156281 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.340858 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:39.340843413 +0000 UTC m=+83.276304244 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.340960 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.341027 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.341113 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.341040 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.341143 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.341155 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.341273 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:39.341237133 +0000 UTC m=+83.276698014 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.341313 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:39.341297814 +0000 UTC m=+83.276758685 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.366579 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.366643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.366654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.366747 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.366762 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.377169 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:07 crc kubenswrapper[4810]: E1203 05:42:07.377437 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.475449 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.475501 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.475528 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.475547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.475556 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.579247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.579587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.579803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.579982 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.580121 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.684083 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.684149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.684167 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.684191 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.684211 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.788301 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.788384 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.788404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.788434 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.788457 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.893327 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.893376 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.893389 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.893457 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.893472 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.996236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.996284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.996294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.996315 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:07 crc kubenswrapper[4810]: I1203 05:42:07.996327 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:07Z","lastTransitionTime":"2025-12-03T05:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.098990 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.099026 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.099035 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.099053 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.099066 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.202563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.202618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.202633 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.202656 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.202671 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.305288 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.305682 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.305859 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.305996 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.306110 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.376724 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.376942 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.377188 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:08 crc kubenswrapper[4810]: E1203 05:42:08.377161 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:08 crc kubenswrapper[4810]: E1203 05:42:08.377530 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:08 crc kubenswrapper[4810]: E1203 05:42:08.377699 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.410717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.410827 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.410851 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.410887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.410912 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.515174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.515253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.515272 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.515304 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.515323 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.618351 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.618924 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.619088 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.619267 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.619449 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.723597 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.723674 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.723692 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.723724 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.723772 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.827566 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.827655 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.827683 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.827726 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.827792 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.931637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.931697 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.931719 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.931783 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.931809 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.969910 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.969974 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.969994 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.970025 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.970044 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:08 crc kubenswrapper[4810]: E1203 05:42:08.992519 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:08Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.998613 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.998687 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.998715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.998793 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:08 crc kubenswrapper[4810]: I1203 05:42:08.998822 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:08Z","lastTransitionTime":"2025-12-03T05:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: E1203 05:42:09.020379 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.026333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.026388 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.026406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.026434 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.026478 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: E1203 05:42:09.049483 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.055573 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.055712 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.055774 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.055809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.055836 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: E1203 05:42:09.078207 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.084342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.084397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.084418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.084445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.084465 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: E1203 05:42:09.107452 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: E1203 05:42:09.107668 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.110983 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.111032 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.111051 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.111073 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.111092 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.214222 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.214294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.214317 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.214346 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.214373 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.317800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.317904 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.317926 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.317955 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.317976 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.376413 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:09 crc kubenswrapper[4810]: E1203 05:42:09.376639 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.421594 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.423516 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.423578 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.423597 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.423624 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.423647 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.437193 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.444602 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.466137 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.487184 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.521465 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9
dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.527139 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.527205 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.527223 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.527251 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.527316 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.542094 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.567641 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.593341 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.613041 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\
"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.631770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.631835 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.631858 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.631962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.631984 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.654727 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.676318 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.696942 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.726912 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.735251 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.735326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.735345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.735374 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.735394 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.744810 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.761005 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.781145 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.803165 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.821489 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:09Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.839204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.839245 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.839255 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.839273 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.839285 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.942716 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.942812 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.942831 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.942855 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:09 crc kubenswrapper[4810]: I1203 05:42:09.942873 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:09Z","lastTransitionTime":"2025-12-03T05:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.046004 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.046061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.046077 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.046099 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.046117 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.149331 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.149395 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.149416 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.149446 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.149464 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.253441 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.253517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.253534 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.253562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.253582 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.357723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.357856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.357881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.357913 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.357940 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.377023 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.377073 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.377253 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:10 crc kubenswrapper[4810]: E1203 05:42:10.377404 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:10 crc kubenswrapper[4810]: E1203 05:42:10.377572 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:10 crc kubenswrapper[4810]: E1203 05:42:10.377722 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.461690 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.461789 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.461811 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.461840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.461862 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.565430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.565496 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.565515 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.565539 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.565558 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.669168 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.669232 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.669253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.669280 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.669299 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.772656 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.772728 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.772779 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.772810 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.772831 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.877698 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.877926 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.877952 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.877988 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.878012 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.982194 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.982254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.982270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.982295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:10 crc kubenswrapper[4810]: I1203 05:42:10.982310 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:10Z","lastTransitionTime":"2025-12-03T05:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.086818 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.086934 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.086953 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.086982 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.087004 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.191829 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.191931 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.191961 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.192005 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.192025 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.295714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.295823 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.295853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.295888 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.295917 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.376964 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:11 crc kubenswrapper[4810]: E1203 05:42:11.377182 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.399405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.399488 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.399511 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.399546 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.399573 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.502844 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.502920 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.502940 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.502968 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.502988 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.606108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.606257 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.606285 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.606318 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.606343 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.709810 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.709868 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.709886 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.709913 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.709934 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.813389 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.813452 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.813467 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.813489 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.813507 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.916559 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.916637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.916655 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.916683 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:11 crc kubenswrapper[4810]: I1203 05:42:11.916702 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:11Z","lastTransitionTime":"2025-12-03T05:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.020517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.020598 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.020623 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.020659 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.020685 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.124655 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.124771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.124789 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.124819 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.124840 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.228494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.228554 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.228570 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.228589 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.228604 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.332661 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.332720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.332752 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.332774 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.332786 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.376505 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.376586 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:12 crc kubenswrapper[4810]: E1203 05:42:12.376663 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.376843 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:12 crc kubenswrapper[4810]: E1203 05:42:12.377045 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:12 crc kubenswrapper[4810]: E1203 05:42:12.377239 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.435472 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.435796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.435866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.435980 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.436052 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.538432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.538896 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.539010 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.539123 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.539224 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.642172 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.642237 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.642258 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.642282 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.642299 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.745893 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.745984 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.746010 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.746050 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.746074 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.849649 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.849953 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.850084 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.850187 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.850316 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.953656 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.953713 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.953726 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.953759 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:12 crc kubenswrapper[4810]: I1203 05:42:12.953776 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:12Z","lastTransitionTime":"2025-12-03T05:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.057261 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.057326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.057351 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.057382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.057409 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.161126 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.161188 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.161207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.161231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.161249 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.263908 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.263975 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.263992 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.264046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.264065 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.366903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.366982 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.367006 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.367040 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.367062 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.377304 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:13 crc kubenswrapper[4810]: E1203 05:42:13.377505 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.470236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.470295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.470314 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.470345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.470365 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.573387 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.573439 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.573458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.573485 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.573506 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.676601 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.676688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.676715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.676788 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.676819 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.780258 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.780350 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.780371 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.780398 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.780416 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.882916 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.882986 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.883006 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.883044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.883079 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.986214 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.986275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.986290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.986313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:13 crc kubenswrapper[4810]: I1203 05:42:13.986328 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:13Z","lastTransitionTime":"2025-12-03T05:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.089316 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.089382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.089397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.089419 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.089436 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.192407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.192487 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.192536 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.192582 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.192606 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.296689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.296764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.296782 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.296807 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.296825 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.376918 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:14 crc kubenswrapper[4810]: E1203 05:42:14.377141 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.377509 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:14 crc kubenswrapper[4810]: E1203 05:42:14.377671 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.378028 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:14 crc kubenswrapper[4810]: E1203 05:42:14.378595 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.399163 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.399263 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.399289 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.399324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.399349 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.502908 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.502939 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.502948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.502966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.502977 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.605815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.605887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.605907 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.605938 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.605959 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.709776 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.709860 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.709882 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.709912 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.709936 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.814288 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.814360 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.814378 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.814406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.814425 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.917215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.917279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.917296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.917321 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:14 crc kubenswrapper[4810]: I1203 05:42:14.917339 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:14Z","lastTransitionTime":"2025-12-03T05:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.020542 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.020621 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.020643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.020673 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.020694 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.124513 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.124582 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.124603 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.124632 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.124651 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.229547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.229638 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.229658 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.229687 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.229705 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.333376 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.333471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.333489 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.333516 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.333535 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.376936 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:15 crc kubenswrapper[4810]: E1203 05:42:15.377287 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.437165 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.437287 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.437307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.437341 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.437363 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.541300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.541393 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.541413 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.541843 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.542147 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.645374 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.645445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.645466 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.645493 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.645512 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.748162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.748228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.748252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.748285 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.748308 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.852150 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.852216 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.852233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.852264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.852288 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.956051 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.956110 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.956124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.956143 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:15 crc kubenswrapper[4810]: I1203 05:42:15.956158 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:15Z","lastTransitionTime":"2025-12-03T05:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.059418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.059475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.059492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.059524 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.059543 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.163435 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.163485 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.163502 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.163527 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.163546 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.267048 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.267139 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.267166 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.267200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.267218 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.371681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.372445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.372476 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.372517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.372545 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.376372 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.376405 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.376803 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:16 crc kubenswrapper[4810]: E1203 05:42:16.377011 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:16 crc kubenswrapper[4810]: E1203 05:42:16.376904 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:16 crc kubenswrapper[4810]: E1203 05:42:16.377393 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.381039 4810 scope.go:117] "RemoveContainer" containerID="51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61" Dec 03 05:42:16 crc kubenswrapper[4810]: E1203 05:42:16.389843 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.403885 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccc3de85-f058-4508-b473-96e892aabcd8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.440509 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.461027 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.476006 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.476093 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.476120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.476156 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.476181 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.479695 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.503480 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.531013 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.550665 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.577532 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.579690 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.579788 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.579807 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.579839 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.579858 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.604914 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.629084 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.650012 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.672001 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.682702 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.682797 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.682820 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.682846 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.682864 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.708363 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.728481 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.753328 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.775154 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.786165 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.786204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.786215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.786233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.786246 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.797094 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.817691 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:16Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.889626 4810 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.889691 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.889707 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.889773 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.889791 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.993624 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.993686 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.993706 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.993831 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:16 crc kubenswrapper[4810]: I1203 05:42:16.993860 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:16Z","lastTransitionTime":"2025-12-03T05:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.097769 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.097839 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.097858 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.097887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.097908 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.201637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.201710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.201755 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.201784 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.201803 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.305152 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.305203 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.305222 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.305248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.305268 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.376439 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:17 crc kubenswrapper[4810]: E1203 05:42:17.376670 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.408602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.408689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.408711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.408775 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.408804 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.512839 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.512921 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.512939 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.512965 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.512983 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.617164 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.617244 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.617261 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.617292 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.617313 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.720311 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.720381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.720395 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.720417 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.720431 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.824714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.824837 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.824858 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.824890 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.824909 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.927725 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.927833 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.927856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.927884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:17 crc kubenswrapper[4810]: I1203 05:42:17.928150 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:17Z","lastTransitionTime":"2025-12-03T05:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.031691 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.031796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.031821 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.031854 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.031877 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.134929 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.134985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.134997 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.135016 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.135029 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.238196 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.238285 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.238303 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.238330 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.238352 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.341932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.341997 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.342016 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.342042 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.342063 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.377076 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.377114 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.377283 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:18 crc kubenswrapper[4810]: E1203 05:42:18.377513 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:18 crc kubenswrapper[4810]: E1203 05:42:18.377650 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:18 crc kubenswrapper[4810]: E1203 05:42:18.377774 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.445108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.445199 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.445218 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.445249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.445271 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.549444 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.549533 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.549552 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.549583 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.549604 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.653376 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.653443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.653473 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.653505 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.653528 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.757205 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.757260 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.757283 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.757307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.757324 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.861280 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.861381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.861400 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.861428 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.861449 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.964958 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.965028 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.965046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.965073 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:18 crc kubenswrapper[4810]: I1203 05:42:18.965092 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:18Z","lastTransitionTime":"2025-12-03T05:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.069149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.069256 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.069279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.069309 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.069328 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.172837 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.172908 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.172928 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.172954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.172973 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.183644 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.183715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.183761 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.183793 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.183816 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: E1203 05:42:19.200042 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:19Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.206350 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.206424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.206438 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.206462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.206483 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: E1203 05:42:19.229786 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:19Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.236221 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.236296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.236318 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.236349 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.236371 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: E1203 05:42:19.258785 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:19Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.265189 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.265407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.265569 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.265761 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.265925 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: E1203 05:42:19.282528 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:19Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.288148 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.288193 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.288207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.288233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.288251 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: E1203 05:42:19.308075 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:19Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:19 crc kubenswrapper[4810]: E1203 05:42:19.308338 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.310472 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.310528 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.310549 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.310571 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.310589 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.377224 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:19 crc kubenswrapper[4810]: E1203 05:42:19.377466 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.413784 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.413866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.413887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.413917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.413939 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.517083 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.517124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.517145 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.517174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.517194 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.620978 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.621013 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.621026 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.621043 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.621056 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.724627 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.724688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.724705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.724764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.724783 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.827336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.827398 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.827412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.827433 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.827447 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.930833 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.930899 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.930924 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.930959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:19 crc kubenswrapper[4810]: I1203 05:42:19.930987 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:19Z","lastTransitionTime":"2025-12-03T05:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.034391 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.034494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.034512 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.034536 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.034556 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.137807 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.137865 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.137876 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.137891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.137900 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.241563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.241636 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.241656 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.241684 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.241702 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.344334 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.344377 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.344386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.344405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.344416 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.376586 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.376622 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.376626 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:20 crc kubenswrapper[4810]: E1203 05:42:20.376831 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:20 crc kubenswrapper[4810]: E1203 05:42:20.376933 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:20 crc kubenswrapper[4810]: E1203 05:42:20.376998 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.447840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.447966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.447988 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.448019 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.448042 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.552202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.552275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.552292 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.552318 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.552334 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.654927 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.654994 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.655013 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.655039 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.655061 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.757822 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.757885 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.757907 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.757933 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.757954 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.861721 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.861800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.861816 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.861839 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.861857 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.965357 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.965437 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.965463 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.965500 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:20 crc kubenswrapper[4810]: I1203 05:42:20.965526 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:20Z","lastTransitionTime":"2025-12-03T05:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.069379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.069438 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.069451 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.069472 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.069490 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.172884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.172936 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.172954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.172981 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.173000 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.275977 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.276046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.276070 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.276098 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.276119 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.377443 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:21 crc kubenswrapper[4810]: E1203 05:42:21.377673 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.379470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.379519 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.379532 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.379554 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.379567 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.482361 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.482448 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.482477 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.482535 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.482560 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.586616 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.586669 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.586682 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.586702 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.586718 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.689476 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.689526 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.689542 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.689568 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.689586 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.792522 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.792565 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.792575 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.792591 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.792601 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.895390 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.895456 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.895467 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.895493 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.895509 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.999017 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.999085 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.999106 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.999133 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:21 crc kubenswrapper[4810]: I1203 05:42:21.999152 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:21Z","lastTransitionTime":"2025-12-03T05:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.102012 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.102054 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.102070 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.102098 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.102113 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.205063 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.205101 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.205114 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.205134 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.205147 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.308959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.309063 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.309084 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.309110 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.309130 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.376625 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.376669 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:22 crc kubenswrapper[4810]: E1203 05:42:22.376840 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.376862 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:22 crc kubenswrapper[4810]: E1203 05:42:22.376999 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:22 crc kubenswrapper[4810]: E1203 05:42:22.377244 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.412018 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.412064 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.412080 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.412096 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.412108 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.514968 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.515022 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.515034 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.515056 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.515071 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.550777 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:22 crc kubenswrapper[4810]: E1203 05:42:22.550984 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:42:22 crc kubenswrapper[4810]: E1203 05:42:22.551091 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs podName:7283fe50-3c8e-4b8b-90ac-80e0e2c9a746 nodeName:}" failed. No retries permitted until 2025-12-03 05:42:54.551066118 +0000 UTC m=+98.486526949 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs") pod "network-metrics-daemon-zx6mp" (UID: "7283fe50-3c8e-4b8b-90ac-80e0e2c9a746") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.617608 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.617675 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.617695 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.617724 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.617777 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.720645 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.720692 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.720706 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.720728 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.720767 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.824421 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.824517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.824544 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.824575 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.824596 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.927862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.928115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.928194 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.928230 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:22 crc kubenswrapper[4810]: I1203 05:42:22.928381 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:22Z","lastTransitionTime":"2025-12-03T05:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.031879 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.031930 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.031947 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.031973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.031989 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.135324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.135404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.135424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.135453 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.135473 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.239188 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.239234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.239246 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.239268 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.239281 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.342596 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.342638 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.342650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.342668 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.342681 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.376493 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:23 crc kubenswrapper[4810]: E1203 05:42:23.376723 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.445294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.445365 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.445379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.445404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.445421 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.548381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.548467 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.548499 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.548539 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.548567 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.651220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.651284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.651304 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.651335 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.651356 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.754324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.754391 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.754405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.754426 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.754442 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.858397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.858475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.858502 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.858545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.858581 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.921997 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/0.log" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.922085 4810 generic.go:334] "Generic (PLEG): container finished" podID="61ac6c2e-df95-49c5-a959-0e061e9c5909" containerID="2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66" exitCode=1 Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.922142 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4279f" event={"ID":"61ac6c2e-df95-49c5-a959-0e061e9c5909","Type":"ContainerDied","Data":"2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.922859 4810 scope.go:117] "RemoveContainer" containerID="2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.943225 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:23Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.958944 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:23Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.961970 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.962052 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.962079 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.962255 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.962312 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:23Z","lastTransitionTime":"2025-12-03T05:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.979727 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:23Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:23 crc kubenswrapper[4810]: I1203 05:42:23.998781 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:23Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.019070 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.030982 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.052470 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccc3de85-f058-4508-b473-96e892aabcd8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.065000 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.065093 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.065118 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.065153 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.065178 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.100365 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.145761 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.168489 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.168550 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.168562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.168584 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.168599 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.177226 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.199670 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.212420 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.224850 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.237809 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.251464 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"2025-12-03T05:41:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2\\\\n2025-12-03T05:41:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2 to /host/opt/cni/bin/\\\\n2025-12-03T05:41:37Z [verbose] multus-daemon started\\\\n2025-12-03T05:41:37Z [verbose] Readiness Indicator file check\\\\n2025-12-03T05:42:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.266076 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.270570 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.270608 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc 
kubenswrapper[4810]: I1203 05:42:24.270620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.270637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.270648 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.276586 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.288592 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.376931 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.376977 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:24 crc kubenswrapper[4810]: E1203 05:42:24.377116 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:24 crc kubenswrapper[4810]: E1203 05:42:24.377284 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.377525 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:24 crc kubenswrapper[4810]: E1203 05:42:24.377942 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.384332 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.384375 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.384386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.384404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.384419 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.488037 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.488520 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.488891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.489181 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.489455 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.592779 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.592834 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.592846 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.592862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.592873 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.695294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.695365 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.695385 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.695419 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.695440 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.798377 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.798442 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.798461 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.798483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.798507 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.901391 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.901460 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.901478 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.901502 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.901519 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:24Z","lastTransitionTime":"2025-12-03T05:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.930446 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/0.log" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.930535 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4279f" event={"ID":"61ac6c2e-df95-49c5-a959-0e061e9c5909","Type":"ContainerStarted","Data":"ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533"} Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.954945 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:24 crc kubenswrapper[4810]: I1203 05:42:24.971715 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:24Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.005611 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.005674 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.005692 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.005717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.005752 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.007489 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.027084 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.049818 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.070940 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.099458 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.109341 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.109430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.109457 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.109492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.109519 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.117275 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccc3de85-f058-4508-b473-96e892aabcd8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97
aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.153517 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.174174 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.190314 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 
03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.207412 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.213008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.213118 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.213189 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.213275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.213334 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.229678 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"2025-12-03T05:41:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2\\\\n2025-12-03T05:41:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2 to /host/opt/cni/bin/\\\\n2025-12-03T05:41:37Z [verbose] multus-daemon started\\\\n2025-12-03T05:41:37Z [verbose] Readiness Indicator file check\\\\n2025-12-03T05:42:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:42:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.251917 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.266818 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.285330 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.302204 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.316002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.316072 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.316092 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.316119 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.316185 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.318782 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:25Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.376469 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:25 crc kubenswrapper[4810]: E1203 05:42:25.376676 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.419538 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.419607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.419622 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.419643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.419658 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.523082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.523384 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.523524 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.523677 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.523847 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.626038 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.626107 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.626118 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.626142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.626152 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.728400 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.728901 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.729276 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.729528 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.729684 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.832259 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.832408 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.832501 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.832590 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.832679 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.934868 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.935137 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.935279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.935439 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:25 crc kubenswrapper[4810]: I1203 05:42:25.935934 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:25Z","lastTransitionTime":"2025-12-03T05:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.039720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.039791 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.039801 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.039824 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.039836 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.142599 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.142661 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.142672 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.142688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.142699 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.245987 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.246073 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.246093 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.246125 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.246146 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.349453 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.349537 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.349558 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.349588 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.349609 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.377038 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.377070 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.377257 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:26 crc kubenswrapper[4810]: E1203 05:42:26.377398 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:26 crc kubenswrapper[4810]: E1203 05:42:26.377562 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:26 crc kubenswrapper[4810]: E1203 05:42:26.377773 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.392047 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccc3de85-f058-4508-b473-96e892aabcd8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.430070 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name
\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab
01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.447102 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.452345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.452440 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.452473 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.452507 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.452532 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.466807 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"2025-12-03T05:41:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2\\\\n2025-12-03T05:41:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2 to /host/opt/cni/bin/\\\\n2025-12-03T05:41:37Z [verbose] multus-daemon started\\\\n2025-12-03T05:41:37Z [verbose] Readiness Indicator file check\\\\n2025-12-03T05:42:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:42:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.492938 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.511900 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.535585 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.556093 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.556138 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.556149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.556166 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.556179 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.559797 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.578485 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.594644 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.609764 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.627938 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.646681 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b560
00c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.659413 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.659480 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.659496 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.659520 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.659537 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.669788 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.686348 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.700073 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.717060 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.746932 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9
dcce6a7f1437bbd129bbdd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:26Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.764191 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.764271 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.764294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.764322 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.764342 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.867986 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.868045 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.868057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.868078 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.868092 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.971787 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.971841 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.971853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.971872 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:26 crc kubenswrapper[4810]: I1203 05:42:26.971884 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:26Z","lastTransitionTime":"2025-12-03T05:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.076843 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.076918 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.076936 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.076963 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.076983 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.180487 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.180559 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.180578 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.180608 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.180630 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.284880 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.284918 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.284928 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.284942 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.284952 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.376848 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:27 crc kubenswrapper[4810]: E1203 05:42:27.377094 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.378361 4810 scope.go:117] "RemoveContainer" containerID="51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.387711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.387778 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.387792 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.387813 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.387828 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.491619 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.491676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.491689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.491710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.491724 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.594042 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.594123 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.594140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.594167 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.594189 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.696472 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.696531 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.696546 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.696566 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.696585 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.799122 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.799174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.799185 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.799205 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.799219 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.920141 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.920200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.920403 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.920423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.920438 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:27Z","lastTransitionTime":"2025-12-03T05:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.943100 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/2.log" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.946329 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6"} Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.946959 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.966796 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:27Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.982273 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:27Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:27 crc kubenswrapper[4810]: I1203 05:42:27.996236 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:27Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.008808 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.024290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.024354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.024366 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.024387 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.024402 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.031119 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:42:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.044061 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 
05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.058924 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccc3de85-f058-4508-b473-96e892aabcd8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.083366 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab52
0e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.098452 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.116839 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.126921 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.126973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc 
kubenswrapper[4810]: I1203 05:42:28.126986 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.127006 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.127017 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.132041 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.149169 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.165034 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.181948 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.195903 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.206791 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.229527 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"2025-12-03T05:41:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2\\\\n2025-12-03T05:41:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2 to /host/opt/cni/bin/\\\\n2025-12-03T05:41:37Z [verbose] multus-daemon started\\\\n2025-12-03T05:41:37Z [verbose] Readiness Indicator file check\\\\n2025-12-03T05:42:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:42:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.230189 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.230249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.230261 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.230284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.230301 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.246515 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.332862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.332917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.332929 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.332949 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.332962 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.376838 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.376868 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:28 crc kubenswrapper[4810]: E1203 05:42:28.377105 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.376874 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:28 crc kubenswrapper[4810]: E1203 05:42:28.377231 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:28 crc kubenswrapper[4810]: E1203 05:42:28.377339 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.442571 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.442620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.442631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.442648 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.442660 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.545840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.546136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.546234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.546328 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.546712 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.648594 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.648630 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.648639 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.648653 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.648663 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.750605 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.750915 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.750985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.751047 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.751102 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.853718 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.853811 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.853829 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.853856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.853880 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.953010 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/3.log" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.953626 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/2.log" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.955561 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.955650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.955666 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.955689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.955704 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:28Z","lastTransitionTime":"2025-12-03T05:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.959677 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" exitCode=1 Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.959717 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6"} Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.959784 4810 scope.go:117] "RemoveContainer" containerID="51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.961039 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:42:28 crc kubenswrapper[4810]: E1203 05:42:28.961329 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:42:28 crc kubenswrapper[4810]: I1203 05:42:28.991516 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:28Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.009891 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.029839 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.046401 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.058959 4810 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.059170 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.059296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.059454 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.059576 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.067760 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b47957b8410da2a8c528383bb47cd70fbedb075
04f4496ecf377fbb73a732f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ca57c7d0777d3ae9806beaec886438dfdcbec9dcce6a7f1437bbd129bbdd61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:03Z\\\",\\\"message\\\":\\\"rom k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373066 6494 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 05:42:03.373127 6494 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373369 6494 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373499 6494 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.373529 6494 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:03.374093 6494 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 05:42:03.374150 6494 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 05:42:03.374174 6494 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 05:42:03.374212 6494 factory.go:656] Stopping watch factory\\\\nI1203 05:42:03.374225 6494 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 05:42:03.374241 6494 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 05:42:03.374257 6494 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:28Z\\\",\\\"message\\\":\\\"etwork-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 05:42:28.348247 6848 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 05:42:28.348335 6848 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.348561 6848 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.348818 6848 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.349189 6848 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.349194 6848 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.353219 6848 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1203 05:42:28.353252 6848 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1203 05:42:28.353375 6848 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
05:42:28.353421 6848 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 05:42:28.353536 6848 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri
-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.083633 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 
05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.100101 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccc3de85-f058-4508-b473-96e892aabcd8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.133548 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab52
0e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.148157 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.163184 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.163228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.163237 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.163253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.163265 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.166797 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.187930 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.209068 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.229921 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.247340 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.266200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.266253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.266270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.266297 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.266321 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.268600 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"2025-12-03T05:41:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2\\\\n2025-12-03T05:41:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2 to /host/opt/cni/bin/\\\\n2025-12-03T05:41:37Z [verbose] multus-daemon started\\\\n2025-12-03T05:41:37Z [verbose] Readiness Indicator file check\\\\n2025-12-03T05:42:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:42:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.292945 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.313021 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.336487 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.369031 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.369100 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.369112 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.369153 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.369168 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.376512 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:29 crc kubenswrapper[4810]: E1203 05:42:29.376632 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.473268 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.473326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.473343 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.473371 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.473391 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.556497 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.556556 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.556579 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.556620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.556647 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: E1203 05:42:29.576619 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.583708 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.583814 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.583832 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.583861 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.583884 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: E1203 05:42:29.599782 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.604380 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.604518 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.604536 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.604557 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.604572 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: E1203 05:42:29.627638 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.638066 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.638110 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.638127 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.638153 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.638171 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: E1203 05:42:29.656973 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.661688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.661878 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.661999 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.662104 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.662206 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: E1203 05:42:29.677035 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: E1203 05:42:29.677260 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.679566 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.679600 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.679612 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.679634 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.679649 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.783758 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.788202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.788396 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.788543 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.788852 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.895338 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.895405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.895424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.895449 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.895466 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.965087 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/3.log" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.969310 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:42:29 crc kubenswrapper[4810]: E1203 05:42:29.969524 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.988211 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccc3de85-f058-4508-b473-96e892aabcd8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:29Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.998689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.998776 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.998787 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.998805 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:29 crc kubenswrapper[4810]: I1203 05:42:29.998816 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:29Z","lastTransitionTime":"2025-12-03T05:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.013245 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.026827 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.042562 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.058594 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.074875 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.093023 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.102056 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.102150 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.102171 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.102255 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.102280 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.111298 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.126973 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.140413 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.155574 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"2025-12-03T05:41:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2\\\\n2025-12-03T05:41:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2 to /host/opt/cni/bin/\\\\n2025-12-03T05:41:37Z [verbose] multus-daemon started\\\\n2025-12-03T05:41:37Z [verbose] Readiness Indicator file check\\\\n2025-12-03T05:42:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:42:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.169277 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.183904 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.202558 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.204993 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.205277 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.205548 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.205651 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.205960 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.217688 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.232181 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.256676 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:28Z\\\",\\\"message\\\":\\\"etwork-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 05:42:28.348247 6848 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 05:42:28.348335 6848 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.348561 6848 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.348818 6848 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.349189 6848 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.349194 6848 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.353219 6848 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1203 05:42:28.353252 6848 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1203 05:42:28.353375 6848 ovnkube.go:599] Stopped ovnkube\\\\nI1203 05:42:28.353421 6848 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 05:42:28.353536 6848 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.273053 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:30Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.309321 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.309720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.309810 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.309883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.309952 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.376932 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.376998 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:30 crc kubenswrapper[4810]: E1203 05:42:30.377141 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:30 crc kubenswrapper[4810]: E1203 05:42:30.377552 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.377834 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:30 crc kubenswrapper[4810]: E1203 05:42:30.377954 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.393333 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.412639 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.412699 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.412717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.412773 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.412793 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.516072 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.516168 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.516187 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.516219 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.516239 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.619044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.619091 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.619105 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.619125 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.619138 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.721346 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.721416 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.721440 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.721475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.721498 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.825161 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.825218 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.825236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.825265 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.825286 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.928842 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.928909 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.928925 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.928954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:30 crc kubenswrapper[4810]: I1203 05:42:30.928974 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:30Z","lastTransitionTime":"2025-12-03T05:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.031957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.032015 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.032033 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.032058 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.032076 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.135810 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.135884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.135903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.135932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.135952 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.239355 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.239407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.239424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.239448 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.239466 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.343194 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.343300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.343320 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.343346 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.343364 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.377273 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:31 crc kubenswrapper[4810]: E1203 05:42:31.377508 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.447555 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.447640 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.447658 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.447692 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.447713 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.551671 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.551888 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.551918 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.551957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.551981 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.655326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.655388 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.655405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.655432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.655449 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.758580 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.758638 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.758650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.758673 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.758685 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.862843 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.862991 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.863015 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.863047 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.863071 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.967137 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.967231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.967249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.967278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:31 crc kubenswrapper[4810]: I1203 05:42:31.967301 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:31Z","lastTransitionTime":"2025-12-03T05:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.071243 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.071313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.071333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.071363 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.071385 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.174274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.174325 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.174341 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.174361 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.174375 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.277218 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.277289 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.277307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.277357 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.277373 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.377045 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.377045 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.377201 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:32 crc kubenswrapper[4810]: E1203 05:42:32.377308 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:32 crc kubenswrapper[4810]: E1203 05:42:32.377480 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:32 crc kubenswrapper[4810]: E1203 05:42:32.377562 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.379715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.379770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.379785 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.379834 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.379854 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.483437 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.483511 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.483531 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.483562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.483583 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.586989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.587081 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.587102 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.587135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.587155 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.690429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.690510 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.690530 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.690563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.690589 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.794594 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.794688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.794720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.794791 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.794819 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.898309 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.898401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.898422 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.898453 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:32 crc kubenswrapper[4810]: I1203 05:42:32.898472 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:32Z","lastTransitionTime":"2025-12-03T05:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.002056 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.002131 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.002151 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.002180 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.002201 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.105659 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.105776 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.105803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.105840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.105860 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.209022 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.209090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.209109 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.209180 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.209204 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.312889 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.312969 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.312992 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.313023 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.313047 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.376807 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:33 crc kubenswrapper[4810]: E1203 05:42:33.377055 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.416945 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.417013 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.417032 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.417063 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.417084 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.520835 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.520901 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.520919 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.520946 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.520964 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.624823 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.624902 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.624951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.624985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.625006 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.729246 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.729354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.729381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.729420 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.729448 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.832495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.832549 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.832563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.832582 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.832597 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.935723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.935799 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.935813 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.935835 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:33 crc kubenswrapper[4810]: I1203 05:42:33.935851 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:33Z","lastTransitionTime":"2025-12-03T05:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.038768 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.038910 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.038933 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.038964 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.038985 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.141826 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.141904 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.141924 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.141955 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.141975 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.246502 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.247112 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.247134 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.247165 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.247185 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.350230 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.350300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.350321 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.350349 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.350370 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.377125 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.377258 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:34 crc kubenswrapper[4810]: E1203 05:42:34.377349 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:34 crc kubenswrapper[4810]: E1203 05:42:34.377507 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.377153 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:34 crc kubenswrapper[4810]: E1203 05:42:34.377648 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.454095 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.454153 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.454167 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.454185 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.454198 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.558113 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.558195 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.558214 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.558243 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.558267 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.661785 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.661829 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.661842 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.661864 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.661876 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.765477 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.765548 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.765568 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.765595 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.765617 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.868490 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.868565 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.868581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.868603 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.868615 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.971212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.971283 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.971300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.971324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:34 crc kubenswrapper[4810]: I1203 05:42:34.971343 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:34Z","lastTransitionTime":"2025-12-03T05:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.074240 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.074343 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.074367 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.074395 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.074419 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.177791 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.177864 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.177883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.177911 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.177931 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.280801 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.280867 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.280884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.280914 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.280932 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.376676 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:35 crc kubenswrapper[4810]: E1203 05:42:35.377014 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.389547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.389614 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.389635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.389665 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.389687 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.492296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.492373 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.492384 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.492402 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.492413 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.594828 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.594868 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.594877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.594891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.594901 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.698317 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.698407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.698426 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.698462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.698481 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.800444 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.800485 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.800496 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.800511 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.800548 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.903254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.903307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.903320 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.903342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:35 crc kubenswrapper[4810]: I1203 05:42:35.903355 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:35Z","lastTransitionTime":"2025-12-03T05:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.005636 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.005678 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.005688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.005705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.005718 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.108381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.108421 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.108430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.108444 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.108454 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.211959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.211993 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.212002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.212017 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.212027 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.315773 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.315829 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.315838 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.315858 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.315870 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.376771 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:36 crc kubenswrapper[4810]: E1203 05:42:36.376980 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.377040 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.377112 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:36 crc kubenswrapper[4810]: E1203 05:42:36.378046 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:36 crc kubenswrapper[4810]: E1203 05:42:36.378238 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.402476 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4279f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61ac6c2e-df95-49c5-a959-0e061e9c5909\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:23Z\\\",\\\"message\\\":\\\"2025-12-03T05:41:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2\\\\n2025-12-03T05:41:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5de8b4c9-cb36-4ea4-8b02-f37cc3e662b2 to /host/opt/cni/bin/\\\\n2025-12-03T05:41:37Z [verbose] multus-daemon started\\\\n2025-12-03T05:41:37Z [verbose] Readiness Indicator file check\\\\n2025-12-03T05:42:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:42:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hvdbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4279f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.419962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.420045 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.420073 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.420107 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.420133 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.422787 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80d62d26-c895-4b0b-a5b7-252147ad2dc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7867e6fb12bb43d8e3153cef2fbb0f72e086590152e2aba88e776de9c302f2fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abb1d08683d0dae9a49cfc9c5cbc99c8727dddde219ba7190df30d4c92d50a00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e959fb786abd93861a6bc55359a4b69c0747497758c43a9c88c325e047de7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c5dd2614a54cacbfa1e56606140b9a714502aff20de528ef0e0f14ad42f86e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a368c36b77146c04394118a9216c54ff8078adce5e606be0d3c5f8180832fb3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:39Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceb2655668f67bf1d45b9ae25ad226def9136cf85af73364986a55a2c858198e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c2b2d69041831409c3f68a9a408e0b9df0554dcd8ea732ad1021c290eba4ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5p56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qx2gg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.439950 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kwjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zx6mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.465575 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b006771a-0092-4cc6-bd30-77a421020e10\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 05:41:28.949311 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 05:41:28.950489 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3388224504/tls.crt::/tmp/serving-cert-3388224504/tls.key\\\\\\\"\\\\nI1203 05:41:34.679139 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 05:41:34.691402 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 05:41:34.691865 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 05:41:34.691932 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 05:41:34.691965 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 05:41:34.705563 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 05:41:34.705603 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705610 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 05:41:34.705617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 05:41:34.705622 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 05:41:34.705629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 05:41:34.705633 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 05:41:34.705706 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 05:41:34.706624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.488935 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.512293 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b22e0d3eae0b27328d660498407295b932efebeee4c20f56f0155803360ddffc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac86a8fe92b1b3177d27c350d120fe4c527a2f48b41df8646e2217789eb48c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.523723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.523801 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.523819 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.523845 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.523864 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.533640 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f04cdf7fe75eaecaeca1808b713959a3e3c3bbf5c6e2c7a51861c7a22d09fdbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.552615 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xqw2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f010726-9024-453c-abb9-a2bbd3ee2337\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86785953eb2773ed874f142154ec2e6fd79fd585f0f1a770197b0f1cdf100df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dpph\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xqw2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.574831 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10611e9-89c8-4796-858c-f96cff66f5c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1385219799b3fc12850720bb291fa6437da7633ec3c7772ac91adf346f01f61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cdfe8e2ef03962686cb731e4fcabf7d54d18caf92a93e69579007545d7d3848\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e82bb853c75029fe41bf9cb20038d34540d7b413c5bbe7d35c76e48c371d415e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.592102 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1fa43fc-c2e8-4fff-a95f-ba5cb19aae83\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d199f42e09199f0d41c5f97d8a32674653aaceaaef15a92667f73bee51f1f8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5a087b6a7a2fa4131f93c65787b04662e70c7329dc26380dba9a8175623f264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e5a087b6a7a2fa4131f93c65787b04662e70c7329dc26380dba9a8175623f264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.613233 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2c68ef2-27e6-4d66-8520-c795a1dcc811\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57a1ee3bf7b993a9de03154d0b7c96abf2d48d3358f8aa079ae5c464b4dc201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b358c140b56000c43b7d80638891dd2f72243193eb265d2018cab3508ffa2c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kwhpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.627796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.627877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.627903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.627941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.627968 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.633410 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.654369 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.676487 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.696540 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.731465 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.731530 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.731551 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.731576 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.731595 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.731517 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"885c296a-449b-4977-b6d9-396bc84d3cfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T05:42:28Z\\\",\\\"message\\\":\\\"etwork-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 05:42:28.348247 6848 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 05:42:28.348335 6848 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.348561 6848 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.348818 6848 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.349189 6848 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.349194 6848 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 05:42:28.353219 6848 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1203 05:42:28.353252 6848 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1203 05:42:28.353375 6848 ovnkube.go:599] Stopped ovnkube\\\\nI1203 05:42:28.353421 6848 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 05:42:28.353536 6848 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T05:42:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwp8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-64tlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.751318 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccc3de85-f058-4508-b473-96e892aabcd8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://774b93bb62f7f865871be85c3b750102f2686e9b8872ee7ec4ba6bf9c355b20c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8347ebc57ef936d411785113bd13f32c3938ffde6a4ce543865e1e230cc223f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b17103f40f0a2004739cc16c5c302cd000cf798658584257ad7e5f3b6869e11d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fc7f1ce8fbc4424cd4a25b4c398304a9524259781509c8582afcdf1f3de2c93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.787219 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0b68e21f-fa47-45ce-8533-d4a3803bcab3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d153b29b7e136451de4ab047e6b02bb3277b8bd6db24fffdb3e9de3932662133\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19e0746fc75a0a8ad6856852b47659d65945ceddfbdab520e79b2a1d6598aee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://935116c09b8241c2b7c9357b202c7c0e7067e105cde905eb34c28ddf4f0ad919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e26b394f441424661afa01d61eab7d6ade7ee
5ba1ffef5cd0d9d49fada27a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d63cb99a20c89eddf64942e6b117842c6cd015af1b8863f9092063fdbe11d8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e90ab01623a3f13720e63912b3889faba3d454575ddea15527dd34479ad9d075\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8504f0341810191426785097d26863ef0afe99fc6155a9b26eb8e3c0be2ddd58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a35ba6f61e570350a5db1d3343ff1cf699bc874a2608cc6fec8fb7fae80cf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T05:41:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T05:41:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:16Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.801856 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7jf2f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10a999b3-cbcc-4cb1-a8ec-9518cd5f2ed8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0ab7a1af8b775907d45691f2ba783c6e30c22d1b2e752093a56416a1c726493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sjmlf\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7jf2f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:36Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.834348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.834404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.834420 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.834442 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.834456 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.937279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.937331 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.937342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.937360 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:36 crc kubenswrapper[4810]: I1203 05:42:36.937372 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:36Z","lastTransitionTime":"2025-12-03T05:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.040194 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.040251 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.040267 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.040292 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.040306 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.143662 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.143795 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.143817 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.143845 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.143864 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.247165 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.247249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.247276 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.247313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.247341 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.350931 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.351008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.351037 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.351068 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.351091 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.376512 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:37 crc kubenswrapper[4810]: E1203 05:42:37.376809 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.455695 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.455810 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.455831 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.455862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.455881 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.560174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.560235 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.560247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.560268 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.560280 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.663897 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.663959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.663976 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.664001 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.664020 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.767469 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.767561 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.767587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.767618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.767643 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.870206 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.870242 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.870252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.870284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.870313 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.973074 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.973179 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.973205 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.973240 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:37 crc kubenswrapper[4810]: I1203 05:42:37.973267 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:37Z","lastTransitionTime":"2025-12-03T05:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.077399 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.077471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.077489 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.077520 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.077543 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.180951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.181018 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.181036 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.181061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.181080 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.284675 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.284781 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.284799 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.284824 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.284906 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.377351 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.377420 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:38 crc kubenswrapper[4810]: E1203 05:42:38.377571 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.377655 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:38 crc kubenswrapper[4810]: E1203 05:42:38.377708 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:38 crc kubenswrapper[4810]: E1203 05:42:38.377939 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.388581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.388616 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.388628 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.388647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.388662 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.492891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.492965 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.492984 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.493014 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.493035 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.596093 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.596154 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.596173 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.596199 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.596220 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.700459 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.700529 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.700552 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.700583 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.700608 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.804435 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.804502 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.804526 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.804563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.804586 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.908704 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.908806 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.908827 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.908855 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:38 crc kubenswrapper[4810]: I1203 05:42:38.908875 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:38Z","lastTransitionTime":"2025-12-03T05:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.011815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.011866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.011882 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.011905 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.011923 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.114877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.114966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.114985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.115018 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.115041 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.218200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.218288 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.218312 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.218342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.218368 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.278894 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.279220 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:43.279162527 +0000 UTC m=+147.214623398 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.322532 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.322612 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.322632 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.322663 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.322693 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.377298 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.377501 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.380215 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.380293 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.380365 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.380435 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380539 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380611 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380618 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380646 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380662 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380668 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380788 4810 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:43:43.380708953 +0000 UTC m=+147.316169834 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380669 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380854 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 05:43:43.380812836 +0000 UTC m=+147.316273797 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.380894 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 05:43:43.380878597 +0000 UTC m=+147.316339488 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.381072 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.381137 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 05:43:43.381121594 +0000 UTC m=+147.316582445 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.427213 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.427935 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.427984 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.428308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.428384 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.532481 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.532589 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.532691 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.532804 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.532858 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.636793 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.636862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.636881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.636911 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.636936 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.700765 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.700830 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.700850 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.700881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.700942 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.723000 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.728360 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.728416 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.728436 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.728462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.728480 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.752341 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.758285 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.758344 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.758444 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.758469 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.758487 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.780448 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.787260 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.787336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.787389 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.787421 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.787442 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.812135 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.819492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.819541 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.819560 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.819586 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.819606 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.841722 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T05:42:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827cfc94-e475-4138-b039-a83e4376049e\\\",\\\"systemUUID\\\":\\\"246a1c9c-2777-4e36-9872-3cdc4a9802f0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:39Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:39 crc kubenswrapper[4810]: E1203 05:42:39.842030 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.844880 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.844929 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.844947 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.844978 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.844999 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.949336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.949386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.949403 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.949429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:39 crc kubenswrapper[4810]: I1203 05:42:39.949450 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:39Z","lastTransitionTime":"2025-12-03T05:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.052909 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.052998 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.053024 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.053057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.053084 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.157371 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.157492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.157519 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.157564 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.157601 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.260903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.260981 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.261001 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.261029 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.261052 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.363793 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.364398 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.364595 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.364853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.365050 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.376972 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.376972 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.377013 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:40 crc kubenswrapper[4810]: E1203 05:42:40.377900 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:40 crc kubenswrapper[4810]: E1203 05:42:40.378195 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:40 crc kubenswrapper[4810]: E1203 05:42:40.378044 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.468477 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.468546 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.468571 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.468602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.468627 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.571118 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.571195 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.571233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.571264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.571288 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.675124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.675190 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.675207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.675234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.675256 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.779258 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.779335 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.779354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.779382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.779403 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.884039 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.884114 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.884135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.884162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.884181 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.987614 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.987696 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.987715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.987821 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:40 crc kubenswrapper[4810]: I1203 05:42:40.987849 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:40Z","lastTransitionTime":"2025-12-03T05:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.091106 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.091409 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.091429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.091458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.091477 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.194985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.195075 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.195100 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.195137 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.195163 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.298625 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.298693 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.298720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.298796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.298822 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.377491 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:41 crc kubenswrapper[4810]: E1203 05:42:41.377776 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.379023 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:42:41 crc kubenswrapper[4810]: E1203 05:42:41.379333 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.402769 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.402830 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.402851 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.402882 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.402908 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.506632 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.506716 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.506792 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.506827 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.506849 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.610386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.610458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.610475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.610501 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.610522 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.714451 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.714525 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.714543 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.714569 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.714587 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.818545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.818637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.818659 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.818688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.818715 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.922150 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.922231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.922259 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.922296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:41 crc kubenswrapper[4810]: I1203 05:42:41.922321 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:41Z","lastTransitionTime":"2025-12-03T05:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.025420 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.025504 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.025525 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.025554 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.025577 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.130763 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.130836 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.130855 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.130883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.130904 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.234963 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.235019 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.235038 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.235065 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.235086 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.338509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.338568 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.338585 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.338610 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.338630 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.377890 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:42 crc kubenswrapper[4810]: E1203 05:42:42.378094 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.378347 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:42 crc kubenswrapper[4810]: E1203 05:42:42.378484 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.378556 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:42 crc kubenswrapper[4810]: E1203 05:42:42.378873 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.442162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.442227 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.442248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.442273 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.442293 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.546061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.546135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.546155 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.546182 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.546202 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.649454 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.649529 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.649550 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.649577 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.649598 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.752920 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.752996 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.753016 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.753047 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.753068 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.857048 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.857418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.857621 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.857921 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.858123 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.962278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.962334 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.962355 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.962385 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:42 crc kubenswrapper[4810]: I1203 05:42:42.962408 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:42Z","lastTransitionTime":"2025-12-03T05:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.066999 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.067067 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.067089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.067120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.067143 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.171008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.171087 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.171110 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.171140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.171161 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.276026 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.276094 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.276114 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.276144 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.276165 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.376568 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:43 crc kubenswrapper[4810]: E1203 05:42:43.376769 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.379357 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.379422 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.379443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.379531 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.379756 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.484203 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.484254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.484269 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.484293 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.484311 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.587138 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.587222 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.587246 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.587276 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.587300 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.691554 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.691663 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.691687 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.691723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.691782 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.795147 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.795205 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.795224 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.795251 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.795272 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.899002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.899074 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.899096 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.899125 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:43 crc kubenswrapper[4810]: I1203 05:42:43.899148 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:43Z","lastTransitionTime":"2025-12-03T05:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.002886 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.002977 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.003010 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.003041 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.003058 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.106110 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.106434 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.106545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.106641 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.106788 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.210420 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.210483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.210503 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.210530 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.210549 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.314115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.314192 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.314211 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.314239 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.314259 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.376828 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.377512 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:44 crc kubenswrapper[4810]: E1203 05:42:44.377618 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.377639 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:44 crc kubenswrapper[4810]: E1203 05:42:44.378280 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:44 crc kubenswrapper[4810]: E1203 05:42:44.378533 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.417224 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.417561 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.417684 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.417877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.417989 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.520676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.520714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.520724 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.520762 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.520771 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.624449 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.624975 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.625203 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.625397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.625546 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.729157 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.730135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.730293 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.730432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.730595 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.834285 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.834345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.834359 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.834378 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.834392 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.937534 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.937603 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.937616 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.937639 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:44 crc kubenswrapper[4810]: I1203 05:42:44.937655 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:44Z","lastTransitionTime":"2025-12-03T05:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.041304 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.041863 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.042049 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.042228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.042361 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.146163 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.146599 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.146816 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.147025 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.147201 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.251127 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.251194 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.251217 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.251248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.251267 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.354227 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.354312 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.354336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.354372 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.354402 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.376776 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:45 crc kubenswrapper[4810]: E1203 05:42:45.376967 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.466918 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.467044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.467074 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.467107 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.467132 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.570941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.571000 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.571021 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.571052 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.571078 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.674792 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.674862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.674886 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.674965 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.674992 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.779023 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.779090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.779109 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.779137 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.779157 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.883184 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.883264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.883286 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.883316 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.883375 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.986390 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.986482 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.986519 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.986558 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:45 crc kubenswrapper[4810]: I1203 05:42:45.986584 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:45Z","lastTransitionTime":"2025-12-03T05:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.090698 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.090810 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.090831 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.090863 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.090934 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.194596 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.194653 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.194666 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.194701 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.194718 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.299220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.299305 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.299322 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.299349 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.299370 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.376647 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.376780 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.377138 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:46 crc kubenswrapper[4810]: E1203 05:42:46.377384 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:46 crc kubenswrapper[4810]: E1203 05:42:46.377570 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:46 crc kubenswrapper[4810]: E1203 05:42:46.377853 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.402210 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6871b45dc2c4230f515fe5fef99bf10335ccf5e756b17aafaa961e86c7e5a2ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.403060 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.403119 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.403142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.403178 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.403205 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.423973 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.446588 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.469595 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc7906ed-7d0a-444b-8e14-12c67bc3301e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T05:41:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb15e33102f7195436d2daf3f72a36a6d4a5ddbae0282742e2df739a4ee096\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T05:41:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4z54p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T05:41:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2hd85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T05:42:46Z is after 2025-08-24T17:21:41Z" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.506679 4810 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.507209 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.507456 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.507619 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.507816 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.576181 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kwhpz" podStartSLOduration=70.576138823 podStartE2EDuration="1m10.576138823s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.550544254 +0000 UTC m=+90.486005125" watchObservedRunningTime="2025-12-03 05:42:46.576138823 +0000 UTC m=+90.511599704" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.612600 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.612670 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.612690 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.612717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.612769 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.622347 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=37.622322587 podStartE2EDuration="37.622322587s" podCreationTimestamp="2025-12-03 05:42:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.575806385 +0000 UTC m=+90.511267296" watchObservedRunningTime="2025-12-03 05:42:46.622322587 +0000 UTC m=+90.557783458" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.622642 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=71.622634644 podStartE2EDuration="1m11.622634644s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.622202494 +0000 UTC m=+90.557663415" watchObservedRunningTime="2025-12-03 05:42:46.622634644 +0000 UTC m=+90.558095515" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.643189 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-7jf2f" podStartSLOduration=71.643066455 podStartE2EDuration="1m11.643066455s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.641301611 +0000 UTC m=+90.576762492" watchObservedRunningTime="2025-12-03 05:42:46.643066455 +0000 UTC m=+90.578527336" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.677496 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=71.677449873 podStartE2EDuration="1m11.677449873s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.676992082 +0000 UTC m=+90.612452933" watchObservedRunningTime="2025-12-03 05:42:46.677449873 +0000 UTC m=+90.612910764" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.716166 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.716216 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.716226 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.716244 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.716255 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.768011 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-xqw2n" podStartSLOduration=71.767986814 podStartE2EDuration="1m11.767986814s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.765987724 +0000 UTC m=+90.701448605" watchObservedRunningTime="2025-12-03 05:42:46.767986814 +0000 UTC m=+90.703447655" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.816554 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-qx2gg" podStartSLOduration=71.816531137 podStartE2EDuration="1m11.816531137s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.815655055 +0000 UTC m=+90.751115896" watchObservedRunningTime="2025-12-03 05:42:46.816531137 +0000 UTC m=+90.751991978" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.816761 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-4279f" podStartSLOduration=71.816756892 podStartE2EDuration="1m11.816756892s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.784553708 +0000 UTC m=+90.720014549" watchObservedRunningTime="2025-12-03 05:42:46.816756892 +0000 UTC m=+90.752217733" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.819250 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.819305 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.819318 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.819340 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.819353 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.848448 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=72.848421803 podStartE2EDuration="1m12.848421803s" podCreationTimestamp="2025-12-03 05:41:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.846935246 +0000 UTC m=+90.782396107" watchObservedRunningTime="2025-12-03 05:42:46.848421803 +0000 UTC m=+90.783882664" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.921961 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.922030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.922046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.922071 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:46 crc kubenswrapper[4810]: I1203 05:42:46.922085 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:46Z","lastTransitionTime":"2025-12-03T05:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.024722 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.024803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.024818 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.024840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.024865 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.127931 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.127994 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.128012 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.128312 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.128428 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.231908 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.232000 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.232021 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.232048 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.232067 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.335422 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.335501 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.335521 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.335547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.335568 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.377251 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:47 crc kubenswrapper[4810]: E1203 05:42:47.377597 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.439632 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.439717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.439775 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.439804 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.439825 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.543820 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.543931 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.543957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.543993 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.544016 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.647837 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.647915 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.647933 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.647962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.647980 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.752264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.752342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.752362 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.752392 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.752410 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.856617 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.856677 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.856690 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.856710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.856724 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.960714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.960834 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.960855 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.960885 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:47 crc kubenswrapper[4810]: I1203 05:42:47.960905 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:47Z","lastTransitionTime":"2025-12-03T05:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.064587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.064780 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.064810 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.064856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.064881 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.169137 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.169223 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.169248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.169284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.169312 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.272882 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.272944 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.272957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.272975 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.272988 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.376363 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.376512 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.376594 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.376915 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.376950 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.376972 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.377004 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: E1203 05:42:48.376990 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.377027 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: E1203 05:42:48.377160 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:48 crc kubenswrapper[4810]: E1203 05:42:48.377037 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.480123 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.480219 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.480252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.480274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.480288 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.583881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.583943 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.583954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.583973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.583985 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.687839 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.687894 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.687911 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.687939 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.687957 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.791669 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.791801 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.791825 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.791854 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.791874 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.895356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.895417 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.895437 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.895462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:48 crc kubenswrapper[4810]: I1203 05:42:48.895481 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:48Z","lastTransitionTime":"2025-12-03T05:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.000483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.000536 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.000547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.000568 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.000583 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.103649 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.103701 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.103712 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.103746 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.103757 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.206957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.207120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.207141 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.207171 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.207195 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.310138 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.310279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.310310 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.310338 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.310356 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.377320 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:49 crc kubenswrapper[4810]: E1203 05:42:49.377569 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.413393 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.413470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.413489 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.413517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.413539 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.517394 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.517453 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.517470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.517498 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.517514 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.620265 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.620354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.620379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.620407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.620427 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.723560 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.723650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.723671 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.723700 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.723720 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.826691 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.826817 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.826839 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.826868 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.826888 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.930727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.930881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.930903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.930966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:49 crc kubenswrapper[4810]: I1203 05:42:49.930992 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:49Z","lastTransitionTime":"2025-12-03T05:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.034292 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.034358 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.034377 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.034408 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.034428 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:50Z","lastTransitionTime":"2025-12-03T05:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.091508 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.091584 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.091604 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.091635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.091657 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T05:42:50Z","lastTransitionTime":"2025-12-03T05:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.163623 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=20.163594632 podStartE2EDuration="20.163594632s" podCreationTimestamp="2025-12-03 05:42:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:46.862397812 +0000 UTC m=+90.797858673" watchObservedRunningTime="2025-12-03 05:42:50.163594632 +0000 UTC m=+94.099055503" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.165214 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9"] Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.165879 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.169374 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.171388 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.171701 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.172023 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.216821 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.216933 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.217028 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.217081 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.217155 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.255995 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podStartSLOduration=75.255972789 podStartE2EDuration="1m15.255972789s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-03 05:42:50.254683837 +0000 UTC m=+94.190144688" watchObservedRunningTime="2025-12-03 05:42:50.255972789 +0000 UTC m=+94.191433640" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.318280 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.318395 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.318445 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.318463 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.318509 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.318552 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.318587 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.320224 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.325293 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.348893 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75f2e404-4b3b-4751-a1f4-e9b7f2f8279d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6s5r9\" (UID: \"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.376696 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.376717 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.376725 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:50 crc kubenswrapper[4810]: E1203 05:42:50.376945 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:50 crc kubenswrapper[4810]: E1203 05:42:50.377330 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:50 crc kubenswrapper[4810]: E1203 05:42:50.377419 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:50 crc kubenswrapper[4810]: I1203 05:42:50.496660 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" Dec 03 05:42:51 crc kubenswrapper[4810]: I1203 05:42:51.062251 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" event={"ID":"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d","Type":"ContainerStarted","Data":"c92d927abd181862366b6b5856d94195bbf18d06f08f748ce166adbe9e98c9d7"} Dec 03 05:42:51 crc kubenswrapper[4810]: I1203 05:42:51.062589 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" event={"ID":"75f2e404-4b3b-4751-a1f4-e9b7f2f8279d","Type":"ContainerStarted","Data":"f7c8f789fa45eeedb4af5d7d096b24655939a846673549a7d67e2c58fc381a5f"} Dec 03 05:42:51 crc kubenswrapper[4810]: I1203 05:42:51.080006 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6s5r9" podStartSLOduration=76.079980137 podStartE2EDuration="1m16.079980137s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:42:51.078858939 +0000 UTC m=+95.014319810" watchObservedRunningTime="2025-12-03 05:42:51.079980137 +0000 UTC m=+95.015441018" Dec 03 05:42:51 crc kubenswrapper[4810]: I1203 05:42:51.377258 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:51 crc kubenswrapper[4810]: E1203 05:42:51.377436 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:52 crc kubenswrapper[4810]: I1203 05:42:52.376987 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:52 crc kubenswrapper[4810]: I1203 05:42:52.377030 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:52 crc kubenswrapper[4810]: I1203 05:42:52.377116 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:52 crc kubenswrapper[4810]: E1203 05:42:52.377162 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:52 crc kubenswrapper[4810]: E1203 05:42:52.377254 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:52 crc kubenswrapper[4810]: E1203 05:42:52.377333 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:53 crc kubenswrapper[4810]: I1203 05:42:53.377090 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:53 crc kubenswrapper[4810]: E1203 05:42:53.377340 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:54 crc kubenswrapper[4810]: I1203 05:42:54.376923 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:54 crc kubenswrapper[4810]: I1203 05:42:54.376998 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:54 crc kubenswrapper[4810]: E1203 05:42:54.377140 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:54 crc kubenswrapper[4810]: I1203 05:42:54.377412 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:54 crc kubenswrapper[4810]: E1203 05:42:54.377497 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:54 crc kubenswrapper[4810]: E1203 05:42:54.377663 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:54 crc kubenswrapper[4810]: I1203 05:42:54.584898 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:54 crc kubenswrapper[4810]: E1203 05:42:54.585012 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:42:54 crc kubenswrapper[4810]: E1203 05:42:54.585065 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs podName:7283fe50-3c8e-4b8b-90ac-80e0e2c9a746 nodeName:}" failed. No retries permitted until 2025-12-03 05:43:58.585051278 +0000 UTC m=+162.520512119 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs") pod "network-metrics-daemon-zx6mp" (UID: "7283fe50-3c8e-4b8b-90ac-80e0e2c9a746") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 05:42:55 crc kubenswrapper[4810]: I1203 05:42:55.376625 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:55 crc kubenswrapper[4810]: E1203 05:42:55.376873 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:55 crc kubenswrapper[4810]: I1203 05:42:55.378081 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:42:55 crc kubenswrapper[4810]: E1203 05:42:55.378351 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-64tlm_openshift-ovn-kubernetes(885c296a-449b-4977-b6d9-396bc84d3cfa)\"" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" Dec 03 05:42:56 crc kubenswrapper[4810]: I1203 05:42:56.376795 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:56 crc kubenswrapper[4810]: I1203 05:42:56.376834 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:56 crc kubenswrapper[4810]: I1203 05:42:56.376911 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:56 crc kubenswrapper[4810]: E1203 05:42:56.379246 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:56 crc kubenswrapper[4810]: E1203 05:42:56.379468 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:56 crc kubenswrapper[4810]: E1203 05:42:56.379662 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:57 crc kubenswrapper[4810]: I1203 05:42:57.376300 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:57 crc kubenswrapper[4810]: E1203 05:42:57.376430 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:42:58 crc kubenswrapper[4810]: I1203 05:42:58.377078 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:42:58 crc kubenswrapper[4810]: I1203 05:42:58.377207 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:42:58 crc kubenswrapper[4810]: I1203 05:42:58.377331 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:42:58 crc kubenswrapper[4810]: E1203 05:42:58.377290 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:42:58 crc kubenswrapper[4810]: E1203 05:42:58.377536 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:42:58 crc kubenswrapper[4810]: E1203 05:42:58.377704 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:42:59 crc kubenswrapper[4810]: I1203 05:42:59.377143 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:42:59 crc kubenswrapper[4810]: E1203 05:42:59.377290 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:00 crc kubenswrapper[4810]: I1203 05:43:00.376769 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:00 crc kubenswrapper[4810]: E1203 05:43:00.376866 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:00 crc kubenswrapper[4810]: I1203 05:43:00.377030 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:00 crc kubenswrapper[4810]: E1203 05:43:00.377075 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:00 crc kubenswrapper[4810]: I1203 05:43:00.377524 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:00 crc kubenswrapper[4810]: E1203 05:43:00.377576 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:01 crc kubenswrapper[4810]: I1203 05:43:01.376676 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:01 crc kubenswrapper[4810]: E1203 05:43:01.377010 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:02 crc kubenswrapper[4810]: I1203 05:43:02.376497 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:02 crc kubenswrapper[4810]: I1203 05:43:02.376674 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:02 crc kubenswrapper[4810]: I1203 05:43:02.376919 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:02 crc kubenswrapper[4810]: E1203 05:43:02.376921 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:02 crc kubenswrapper[4810]: E1203 05:43:02.377042 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:02 crc kubenswrapper[4810]: E1203 05:43:02.377105 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:03 crc kubenswrapper[4810]: I1203 05:43:03.376484 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:03 crc kubenswrapper[4810]: E1203 05:43:03.376762 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:04 crc kubenswrapper[4810]: I1203 05:43:04.377000 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:04 crc kubenswrapper[4810]: E1203 05:43:04.377217 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:04 crc kubenswrapper[4810]: I1203 05:43:04.377327 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:04 crc kubenswrapper[4810]: E1203 05:43:04.377542 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:04 crc kubenswrapper[4810]: I1203 05:43:04.378003 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:04 crc kubenswrapper[4810]: E1203 05:43:04.378072 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:05 crc kubenswrapper[4810]: I1203 05:43:05.376655 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:05 crc kubenswrapper[4810]: E1203 05:43:05.376956 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:06 crc kubenswrapper[4810]: I1203 05:43:06.376938 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:06 crc kubenswrapper[4810]: I1203 05:43:06.377052 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:06 crc kubenswrapper[4810]: I1203 05:43:06.379305 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:06 crc kubenswrapper[4810]: E1203 05:43:06.379283 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:06 crc kubenswrapper[4810]: E1203 05:43:06.379459 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:06 crc kubenswrapper[4810]: E1203 05:43:06.379573 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:07 crc kubenswrapper[4810]: I1203 05:43:07.377304 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:07 crc kubenswrapper[4810]: E1203 05:43:07.377643 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:08 crc kubenswrapper[4810]: I1203 05:43:08.377209 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:08 crc kubenswrapper[4810]: I1203 05:43:08.377812 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:08 crc kubenswrapper[4810]: E1203 05:43:08.378085 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:08 crc kubenswrapper[4810]: I1203 05:43:08.378165 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:08 crc kubenswrapper[4810]: E1203 05:43:08.378632 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:08 crc kubenswrapper[4810]: E1203 05:43:08.378952 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:09 crc kubenswrapper[4810]: I1203 05:43:09.376977 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:09 crc kubenswrapper[4810]: E1203 05:43:09.377227 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:09 crc kubenswrapper[4810]: I1203 05:43:09.378841 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.133575 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/3.log" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.137593 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerStarted","Data":"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0"} Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.138083 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.139179 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/1.log" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.139601 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/0.log" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.139644 4810 generic.go:334] "Generic (PLEG): container finished" podID="61ac6c2e-df95-49c5-a959-0e061e9c5909" containerID="ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533" exitCode=1 Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.139669 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4279f" event={"ID":"61ac6c2e-df95-49c5-a959-0e061e9c5909","Type":"ContainerDied","Data":"ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533"} Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.139703 4810 scope.go:117] "RemoveContainer" containerID="2c43e76eee7da8e09c80328baa16085eaaca06b9e12b55329f4c5c34fc42ee66" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.140129 4810 scope.go:117] "RemoveContainer" containerID="ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533" Dec 03 05:43:10 crc kubenswrapper[4810]: E1203 05:43:10.140268 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-4279f_openshift-multus(61ac6c2e-df95-49c5-a959-0e061e9c5909)\"" pod="openshift-multus/multus-4279f" podUID="61ac6c2e-df95-49c5-a959-0e061e9c5909" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.189287 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podStartSLOduration=94.189262964 podStartE2EDuration="1m34.189262964s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:10.169676354 +0000 UTC m=+114.105137195" watchObservedRunningTime="2025-12-03 05:43:10.189262964 +0000 UTC m=+114.124723815" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.324300 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zx6mp"] Dec 03 05:43:10 crc 
kubenswrapper[4810]: I1203 05:43:10.324421 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:10 crc kubenswrapper[4810]: E1203 05:43:10.324518 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.377046 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:10 crc kubenswrapper[4810]: I1203 05:43:10.377061 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:10 crc kubenswrapper[4810]: E1203 05:43:10.377332 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:10 crc kubenswrapper[4810]: E1203 05:43:10.377443 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:11 crc kubenswrapper[4810]: I1203 05:43:11.147288 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/1.log" Dec 03 05:43:11 crc kubenswrapper[4810]: I1203 05:43:11.376369 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:11 crc kubenswrapper[4810]: E1203 05:43:11.376570 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:12 crc kubenswrapper[4810]: I1203 05:43:12.377167 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:12 crc kubenswrapper[4810]: I1203 05:43:12.377335 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:12 crc kubenswrapper[4810]: I1203 05:43:12.377188 4810 util.go:30] "No sandbox for pod can be found. 
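The "Finished parsing log file" records above reference per-container files laid out as /var/log/pods/<namespace>_<pod>_<uid>/<container>/<n>.log; the number appears to track the container's restart count, which is consistent with kube-multus having a 0.log and a 1.log after its first crash and ovnkube-controller being on 3.log after its repeated crash-loop restarts. Below is a small Go sketch that lists those files for the multus container named above; the helper name, the plain lexicographic sort, and the restart-count reading are assumptions made for this illustration.

    // podlogs.go - illustrative: lists the per-container log files under the
    // /var/log/pods/<namespace>_<pod>_<uid>/<container>/ layout referenced above.
    package main

    import (
        "fmt"
        "path/filepath"
        "sort"
    )

    func containerLogs(ns, pod, uid, container string) ([]string, error) {
        dir := filepath.Join("/var/log/pods", fmt.Sprintf("%s_%s_%s", ns, pod, uid), container)
        files, err := filepath.Glob(filepath.Join(dir, "*.log"))
        if err != nil {
            return nil, err
        }
        sort.Strings(files) // lexicographic order is enough for single-digit rotation numbers
        return files, nil
    }

    func main() {
        // Namespace, pod name, pod UID and container name copied from the multus records above.
        files, err := containerLogs("openshift-multus", "multus-4279f",
            "61ac6c2e-df95-49c5-a959-0e061e9c5909", "kube-multus")
        if err != nil {
            fmt.Println("glob failed:", err)
            return
        }
        for _, f := range files {
            fmt.Println(f)
        }
    }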
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:12 crc kubenswrapper[4810]: E1203 05:43:12.377553 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:12 crc kubenswrapper[4810]: E1203 05:43:12.377720 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:12 crc kubenswrapper[4810]: E1203 05:43:12.377459 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:13 crc kubenswrapper[4810]: I1203 05:43:13.376350 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:13 crc kubenswrapper[4810]: E1203 05:43:13.376987 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:14 crc kubenswrapper[4810]: I1203 05:43:14.377132 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:14 crc kubenswrapper[4810]: I1203 05:43:14.377220 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:14 crc kubenswrapper[4810]: I1203 05:43:14.377132 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:14 crc kubenswrapper[4810]: E1203 05:43:14.377331 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:14 crc kubenswrapper[4810]: E1203 05:43:14.377556 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:14 crc kubenswrapper[4810]: E1203 05:43:14.378149 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:15 crc kubenswrapper[4810]: I1203 05:43:15.376867 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:15 crc kubenswrapper[4810]: E1203 05:43:15.377092 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:16 crc kubenswrapper[4810]: E1203 05:43:16.347676 4810 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 03 05:43:16 crc kubenswrapper[4810]: I1203 05:43:16.376441 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:16 crc kubenswrapper[4810]: I1203 05:43:16.376441 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:16 crc kubenswrapper[4810]: I1203 05:43:16.378493 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:16 crc kubenswrapper[4810]: E1203 05:43:16.378490 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:16 crc kubenswrapper[4810]: E1203 05:43:16.378630 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:16 crc kubenswrapper[4810]: E1203 05:43:16.378714 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:16 crc kubenswrapper[4810]: E1203 05:43:16.468025 4810 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 05:43:17 crc kubenswrapper[4810]: I1203 05:43:17.376673 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:17 crc kubenswrapper[4810]: E1203 05:43:17.376883 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:18 crc kubenswrapper[4810]: I1203 05:43:18.377167 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:18 crc kubenswrapper[4810]: E1203 05:43:18.377415 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:18 crc kubenswrapper[4810]: I1203 05:43:18.377858 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:18 crc kubenswrapper[4810]: E1203 05:43:18.377963 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:18 crc kubenswrapper[4810]: I1203 05:43:18.377990 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:18 crc kubenswrapper[4810]: E1203 05:43:18.378056 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:19 crc kubenswrapper[4810]: I1203 05:43:19.376906 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:19 crc kubenswrapper[4810]: E1203 05:43:19.377077 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:20 crc kubenswrapper[4810]: I1203 05:43:20.376841 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:20 crc kubenswrapper[4810]: I1203 05:43:20.376934 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:20 crc kubenswrapper[4810]: I1203 05:43:20.376862 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:20 crc kubenswrapper[4810]: E1203 05:43:20.377090 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:20 crc kubenswrapper[4810]: E1203 05:43:20.377198 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:20 crc kubenswrapper[4810]: E1203 05:43:20.377312 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:21 crc kubenswrapper[4810]: I1203 05:43:21.377396 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:21 crc kubenswrapper[4810]: E1203 05:43:21.377602 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:21 crc kubenswrapper[4810]: E1203 05:43:21.469971 4810 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 05:43:22 crc kubenswrapper[4810]: I1203 05:43:22.377089 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:22 crc kubenswrapper[4810]: I1203 05:43:22.377110 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:22 crc kubenswrapper[4810]: I1203 05:43:22.377246 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:22 crc kubenswrapper[4810]: E1203 05:43:22.377235 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:22 crc kubenswrapper[4810]: E1203 05:43:22.377430 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:22 crc kubenswrapper[4810]: E1203 05:43:22.377472 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:23 crc kubenswrapper[4810]: I1203 05:43:23.376900 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:23 crc kubenswrapper[4810]: E1203 05:43:23.377089 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:24 crc kubenswrapper[4810]: I1203 05:43:24.377203 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:24 crc kubenswrapper[4810]: I1203 05:43:24.377253 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:24 crc kubenswrapper[4810]: E1203 05:43:24.377355 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:24 crc kubenswrapper[4810]: I1203 05:43:24.377370 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:24 crc kubenswrapper[4810]: E1203 05:43:24.377545 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:24 crc kubenswrapper[4810]: E1203 05:43:24.377641 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:25 crc kubenswrapper[4810]: I1203 05:43:25.377480 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:25 crc kubenswrapper[4810]: E1203 05:43:25.377889 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:25 crc kubenswrapper[4810]: I1203 05:43:25.378181 4810 scope.go:117] "RemoveContainer" containerID="ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533" Dec 03 05:43:26 crc kubenswrapper[4810]: I1203 05:43:26.208983 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/1.log" Dec 03 05:43:26 crc kubenswrapper[4810]: I1203 05:43:26.209507 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4279f" event={"ID":"61ac6c2e-df95-49c5-a959-0e061e9c5909","Type":"ContainerStarted","Data":"75a1cbee2ae5b02f05cab9fd6b558ab3a7960ce58042a9d876cfb52c90f3e927"} Dec 03 05:43:26 crc kubenswrapper[4810]: I1203 05:43:26.376754 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:26 crc kubenswrapper[4810]: I1203 05:43:26.376849 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:26 crc kubenswrapper[4810]: E1203 05:43:26.378195 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:26 crc kubenswrapper[4810]: I1203 05:43:26.378215 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:26 crc kubenswrapper[4810]: E1203 05:43:26.378333 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:26 crc kubenswrapper[4810]: E1203 05:43:26.378487 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:26 crc kubenswrapper[4810]: E1203 05:43:26.470533 4810 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 05:43:27 crc kubenswrapper[4810]: I1203 05:43:27.376520 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:27 crc kubenswrapper[4810]: E1203 05:43:27.377062 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:28 crc kubenswrapper[4810]: I1203 05:43:28.377116 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:28 crc kubenswrapper[4810]: I1203 05:43:28.377239 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:28 crc kubenswrapper[4810]: E1203 05:43:28.377256 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:28 crc kubenswrapper[4810]: I1203 05:43:28.377418 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:28 crc kubenswrapper[4810]: E1203 05:43:28.377555 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:28 crc kubenswrapper[4810]: E1203 05:43:28.377661 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:29 crc kubenswrapper[4810]: I1203 05:43:29.376634 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:29 crc kubenswrapper[4810]: E1203 05:43:29.376815 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:30 crc kubenswrapper[4810]: I1203 05:43:30.377210 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:30 crc kubenswrapper[4810]: I1203 05:43:30.377291 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:30 crc kubenswrapper[4810]: I1203 05:43:30.377210 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:30 crc kubenswrapper[4810]: E1203 05:43:30.377432 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 05:43:30 crc kubenswrapper[4810]: E1203 05:43:30.377680 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zx6mp" podUID="7283fe50-3c8e-4b8b-90ac-80e0e2c9a746" Dec 03 05:43:30 crc kubenswrapper[4810]: E1203 05:43:30.377884 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 05:43:31 crc kubenswrapper[4810]: I1203 05:43:31.376842 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:31 crc kubenswrapper[4810]: E1203 05:43:31.377089 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 05:43:32 crc kubenswrapper[4810]: I1203 05:43:32.377459 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:32 crc kubenswrapper[4810]: I1203 05:43:32.377592 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:32 crc kubenswrapper[4810]: I1203 05:43:32.377495 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:32 crc kubenswrapper[4810]: I1203 05:43:32.381127 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 05:43:32 crc kubenswrapper[4810]: I1203 05:43:32.381619 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 05:43:32 crc kubenswrapper[4810]: I1203 05:43:32.381633 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 05:43:32 crc kubenswrapper[4810]: I1203 05:43:32.381909 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 05:43:33 crc kubenswrapper[4810]: I1203 05:43:33.377383 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:33 crc kubenswrapper[4810]: I1203 05:43:33.379873 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 05:43:33 crc kubenswrapper[4810]: I1203 05:43:33.379866 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.117723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.156148 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-djbsd"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.156648 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.159588 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.160560 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.160563 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tp4fv"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.162151 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.165480 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.166013 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.166271 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fvwpx"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.166506 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.166572 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.166810 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.166834 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.166958 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.167044 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.167090 4810 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.167372 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.167915 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.167956 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.168000 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.207158 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.207873 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.209269 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.209932 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.210102 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.210865 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.211311 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.212994 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-qhkvl"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.213117 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.213249 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.213532 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.213597 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.213724 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.213760 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.213935 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.213955 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214123 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214196 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214228 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214386 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.215052 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214423 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214445 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214455 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214471 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214684 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214786 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214830 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214923 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.214994 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.215087 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.217190 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.217342 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.222906 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-2cbhj"] Dec 
03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.223414 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.224405 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.225253 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.225639 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.225822 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.225879 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.225970 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226052 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226177 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226061 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226121 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226665 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226701 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b8ps5"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226842 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226982 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.226121 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.225641 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.227857 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2cbhj" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.228873 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.229526 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bbzns"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.229956 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-fjrzc"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.230054 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.230281 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m8wws"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.230709 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.231193 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.231491 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.232978 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.233621 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.236655 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.237200 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.237567 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.237687 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.237964 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.239243 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.239627 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.239790 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.239960 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.241564 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.241839 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.242150 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.242807 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.243020 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.243203 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.243337 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.243423 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.243472 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.243707 4810 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.243910 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.244067 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.244075 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.244199 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.244773 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.245613 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cqx4p"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.246098 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.248502 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.248835 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.249489 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.249689 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.250145 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zk295"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.250985 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.252499 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.269642 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.270769 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.271641 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.272127 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.272470 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.282914 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-zqmjf"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285160 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285248 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-serving-cert\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285295 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-audit-dir\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285347 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-serving-cert\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285376 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-config\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285405 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285450 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-image-import-ca\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285480 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-audit-policies\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285513 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.285558 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-encryption-config\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.286077 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.286657 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.287126 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.290095 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-etcd-serving-ca\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.290144 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.300291 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-etcd-client\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.300492 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe9221b2-2ee0-4fa7-846f-f37559bf8631-config\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.300616 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-config\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.300788 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fe9221b2-2ee0-4fa7-846f-f37559bf8631-images\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.300934 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-audit\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.301692 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.301770 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.301844 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.301929 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.301994 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.302051 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.302784 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303010 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c34a2d47-0bc4-4100-bd82-d2bf8e571129-serving-cert\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303095 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303254 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhwpt\" (UniqueName: \"kubernetes.io/projected/139d2f9e-37ad-4a2e-9061-99e7592e68b4-kube-api-access-zhwpt\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: 
\"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303357 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a618272-575b-49a0-983c-64ac1d55259a-audit-dir\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303449 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fe9221b2-2ee0-4fa7-846f-f37559bf8631-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303552 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvwvs\" (UniqueName: \"kubernetes.io/projected/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-kube-api-access-pvwvs\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303656 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-client-ca\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303778 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/139d2f9e-37ad-4a2e-9061-99e7592e68b4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: \"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303892 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j44bk\" (UniqueName: \"kubernetes.io/projected/4a618272-575b-49a0-983c-64ac1d55259a-kube-api-access-j44bk\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.303576 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304019 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304120 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdcjk\" (UniqueName: \"kubernetes.io/projected/fe9221b2-2ee0-4fa7-846f-f37559bf8631-kube-api-access-cdcjk\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304231 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-etcd-client\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304348 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tm65\" (UniqueName: \"kubernetes.io/projected/c34a2d47-0bc4-4100-bd82-d2bf8e571129-kube-api-access-5tm65\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304130 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304536 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304537 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/139d2f9e-37ad-4a2e-9061-99e7592e68b4-serving-cert\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: \"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304626 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-node-pullsecrets\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304658 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304675 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-encryption-config\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304718 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-trusted-ca-bundle\") pod 
\"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304269 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.305076 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.310495 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304315 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.311439 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.311558 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304358 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304402 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.311917 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.304453 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.313354 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.313503 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.313550 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.314296 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.314348 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.316966 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.317421 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.317855 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.318108 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.318835 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.319771 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jj69p"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.320511 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-22pk2"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.320909 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.321271 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.322318 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.326305 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.335226 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.336094 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.336824 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.337631 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.337969 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.338486 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.339010 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.339097 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.339598 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.340243 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.340835 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.341229 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g7l7x"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.342314 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.342755 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-djbsd"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.344045 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.345022 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.351888 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.354623 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.356357 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rfxm8"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.359129 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.362799 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tp4fv"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.363013 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.363119 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nbzmx"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.365113 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.366478 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.370477 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.372083 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.374675 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.376016 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-fjrzc"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.377599 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b8ps5"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.378940 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.380020 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2cbhj"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.384464 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.387186 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zk295"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.388927 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.390462 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cqx4p"] Dec 03 05:43:41 crc 
kubenswrapper[4810]: I1203 05:43:41.391527 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fvwpx"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.392886 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.395201 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.396438 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.398973 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.399077 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.400414 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g7l7x"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.401440 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.402627 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.404162 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.404886 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jj69p"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405336 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/139d2f9e-37ad-4a2e-9061-99e7592e68b4-serving-cert\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: \"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405370 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkjt7\" (UniqueName: \"kubernetes.io/projected/373c7218-dd5b-411d-bf82-94d13f4ca81a-kube-api-access-xkjt7\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405394 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-service-ca\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: 
I1203 05:43:41.405416 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-node-pullsecrets\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405434 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-encryption-config\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405458 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405525 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-client-ca\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405561 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e60eef75-abf0-4b05-94fc-430010bbe664-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405647 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-serving-cert\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405673 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-audit-dir\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405695 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-serving-cert\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405722 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-config\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " 
pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405759 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnd4f\" (UniqueName: \"kubernetes.io/projected/e60eef75-abf0-4b05-94fc-430010bbe664-kube-api-access-jnd4f\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405789 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405818 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vflq7\" (UniqueName: \"kubernetes.io/projected/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-kube-api-access-vflq7\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405864 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-image-import-ca\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405888 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-config\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405907 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/373c7218-dd5b-411d-bf82-94d13f4ca81a-serving-cert\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405932 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-config\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405960 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-audit-policies\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.405982 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406005 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e60eef75-abf0-4b05-94fc-430010bbe664-metrics-tls\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406049 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-encryption-config\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406074 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-serving-cert\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406099 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-etcd-serving-ca\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406122 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406145 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-client\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406171 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89zh9\" (UniqueName: \"kubernetes.io/projected/48979d33-9725-4066-819c-9b1f8f2c62a6-kube-api-access-89zh9\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406210 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-etcd-client\") pod 
\"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406233 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe9221b2-2ee0-4fa7-846f-f37559bf8631-config\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406266 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-config\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406283 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.406289 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fe9221b2-2ee0-4fa7-846f-f37559bf8631-images\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.407013 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m8wws"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.407145 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-qhkvl"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.407207 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-audit\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.407230 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48979d33-9725-4066-819c-9b1f8f2c62a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.407248 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-ca\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.407286 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c34a2d47-0bc4-4100-bd82-d2bf8e571129-serving-cert\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.407293 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fe9221b2-2ee0-4fa7-846f-f37559bf8631-images\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.408131 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.408361 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-config\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.408421 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-node-pullsecrets\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409037 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-image-import-ca\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409117 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409290 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-etcd-serving-ca\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409534 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-audit-dir\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409573 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-config\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409436 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-22pk2"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409619 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bbzns"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409549 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a618272-575b-49a0-983c-64ac1d55259a-audit-policies\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.409984 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhwpt\" (UniqueName: \"kubernetes.io/projected/139d2f9e-37ad-4a2e-9061-99e7592e68b4-kube-api-access-zhwpt\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: \"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410057 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a618272-575b-49a0-983c-64ac1d55259a-audit-dir\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410181 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a618272-575b-49a0-983c-64ac1d55259a-audit-dir\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410261 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fe9221b2-2ee0-4fa7-846f-f37559bf8631-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410330 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e60eef75-abf0-4b05-94fc-430010bbe664-trusted-ca\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410403 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j44bk\" (UniqueName: \"kubernetes.io/projected/4a618272-575b-49a0-983c-64ac1d55259a-kube-api-access-j44bk\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410468 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvwvs\" (UniqueName: \"kubernetes.io/projected/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-kube-api-access-pvwvs\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410502 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-client-ca\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410526 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/139d2f9e-37ad-4a2e-9061-99e7592e68b4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: \"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410545 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdcjk\" (UniqueName: \"kubernetes.io/projected/fe9221b2-2ee0-4fa7-846f-f37559bf8631-kube-api-access-cdcjk\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410574 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-etcd-client\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410582 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-audit\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410603 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe9221b2-2ee0-4fa7-846f-f37559bf8631-config\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410607 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48979d33-9725-4066-819c-9b1f8f2c62a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.410770 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tm65\" (UniqueName: \"kubernetes.io/projected/c34a2d47-0bc4-4100-bd82-d2bf8e571129-kube-api-access-5tm65\") pod 
\"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.411193 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/139d2f9e-37ad-4a2e-9061-99e7592e68b4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: \"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.411559 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-client-ca\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.412448 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.412819 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-encryption-config\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.412963 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-serving-cert\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.413281 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-etcd-client\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.413763 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/139d2f9e-37ad-4a2e-9061-99e7592e68b4-serving-cert\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: \"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.413801 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-bplkj"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.413835 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c34a2d47-0bc4-4100-bd82-d2bf8e571129-serving-cert\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.414609 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-x9mg5"] Dec 03 05:43:41 crc 
kubenswrapper[4810]: I1203 05:43:41.415046 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-x9mg5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.415312 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.415607 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.415891 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a618272-575b-49a0-983c-64ac1d55259a-serving-cert\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.416061 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fe9221b2-2ee0-4fa7-846f-f37559bf8631-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.416175 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-encryption-config\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.416500 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.416569 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-etcd-client\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.418826 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.418966 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.418986 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.422157 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.422203 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq"] Dec 03 05:43:41 crc kubenswrapper[4810]: 
I1203 05:43:41.422215 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rfxm8"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.425409 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.425435 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.425444 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-x9mg5"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.426838 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-bplkj"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.427794 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nbzmx"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.428761 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6bbnd"] Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.429382 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.439034 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.459429 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.479966 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.499419 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511446 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48979d33-9725-4066-819c-9b1f8f2c62a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511476 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-ca\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511511 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e60eef75-abf0-4b05-94fc-430010bbe664-trusted-ca\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 
05:43:41.511554 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48979d33-9725-4066-819c-9b1f8f2c62a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511588 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkjt7\" (UniqueName: \"kubernetes.io/projected/373c7218-dd5b-411d-bf82-94d13f4ca81a-kube-api-access-xkjt7\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511619 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-service-ca\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511648 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-client-ca\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511672 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e60eef75-abf0-4b05-94fc-430010bbe664-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511717 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnd4f\" (UniqueName: \"kubernetes.io/projected/e60eef75-abf0-4b05-94fc-430010bbe664-kube-api-access-jnd4f\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511767 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/373c7218-dd5b-411d-bf82-94d13f4ca81a-serving-cert\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511786 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vflq7\" (UniqueName: \"kubernetes.io/projected/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-kube-api-access-vflq7\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511802 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-config\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511816 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e60eef75-abf0-4b05-94fc-430010bbe664-metrics-tls\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511832 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-config\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511874 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-serving-cert\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511902 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-client\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.511919 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89zh9\" (UniqueName: \"kubernetes.io/projected/48979d33-9725-4066-819c-9b1f8f2c62a6-kube-api-access-89zh9\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.513057 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-service-ca\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.513907 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48979d33-9725-4066-819c-9b1f8f2c62a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.515221 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-config\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.515809 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e60eef75-abf0-4b05-94fc-430010bbe664-trusted-ca\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.516677 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-ca\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.517255 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-config\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.517266 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48979d33-9725-4066-819c-9b1f8f2c62a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.518033 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/373c7218-dd5b-411d-bf82-94d13f4ca81a-serving-cert\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.519659 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.520471 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-serving-cert\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.521948 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e60eef75-abf0-4b05-94fc-430010bbe664-metrics-tls\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.521978 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-client-ca\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.524094 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-etcd-client\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.540472 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.559220 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.578814 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.598848 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.618712 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.639876 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.659303 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.678808 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.699746 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.720980 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.740811 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.759194 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.779884 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.798793 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.819813 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.839204 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.859680 4810 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.879660 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.899633 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.919477 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 05:43:41 crc kubenswrapper[4810]: I1203 05:43:41.939692 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.000051 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.019661 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.040710 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.066248 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.082589 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.106208 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.122079 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.131570 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.139519 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.165725 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.179772 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.199652 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.219654 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.240245 4810 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.259628 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.279219 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.299806 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.320454 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.340309 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.357776 4810 request.go:700] Waited for 1.017807193s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-storage-version-migrator/secrets?fieldSelector=metadata.name%3Dkube-storage-version-migrator-sa-dockercfg-5xfcg&limit=500&resourceVersion=0 Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.359613 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.379818 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.399913 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.419596 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.439826 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.459517 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.481050 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.499800 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.519383 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.539053 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.559588 4810 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.578975 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.599767 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.620803 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.640094 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.659348 4810 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.679500 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.699650 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.719867 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.740298 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.759201 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.779693 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.821339 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.824585 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.838960 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.859459 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.879562 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.908399 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.919213 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.939812 
4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.959615 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 05:43:42 crc kubenswrapper[4810]: I1203 05:43:42.979793 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.000123 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.033543 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhwpt\" (UniqueName: \"kubernetes.io/projected/139d2f9e-37ad-4a2e-9061-99e7592e68b4-kube-api-access-zhwpt\") pod \"openshift-config-operator-7777fb866f-q2w4p\" (UID: \"139d2f9e-37ad-4a2e-9061-99e7592e68b4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.054519 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j44bk\" (UniqueName: \"kubernetes.io/projected/4a618272-575b-49a0-983c-64ac1d55259a-kube-api-access-j44bk\") pod \"apiserver-7bbb656c7d-kxwd5\" (UID: \"4a618272-575b-49a0-983c-64ac1d55259a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.078699 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvwvs\" (UniqueName: \"kubernetes.io/projected/da0b2ad7-dc0d-40ea-945a-ff2e54543eb5-kube-api-access-pvwvs\") pod \"apiserver-76f77b778f-tp4fv\" (UID: \"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5\") " pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.082777 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.095758 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdcjk\" (UniqueName: \"kubernetes.io/projected/fe9221b2-2ee0-4fa7-846f-f37559bf8631-kube-api-access-cdcjk\") pod \"machine-api-operator-5694c8668f-fvwpx\" (UID: \"fe9221b2-2ee0-4fa7-846f-f37559bf8631\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.111869 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.118519 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tm65\" (UniqueName: \"kubernetes.io/projected/c34a2d47-0bc4-4100-bd82-d2bf8e571129-kube-api-access-5tm65\") pod \"controller-manager-879f6c89f-djbsd\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.119330 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.139849 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.165404 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.214674 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.216193 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.220444 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.239469 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.260126 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.280194 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.289585 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.300039 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.310816 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5"] Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.316765 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.332573 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fvwpx"] Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.334256 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.334932 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e60eef75-abf0-4b05-94fc-430010bbe664-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.336890 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.337173 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:45:45.337134773 +0000 UTC m=+269.272595684 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: W1203 05:43:43.337435 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe9221b2_2ee0_4fa7_846f_f37559bf8631.slice/crio-7aa9398c3efe2247f07e37fc67e5b6888b0ff068e212b6b8e56077fa1f0f775e WatchSource:0}: Error finding container 7aa9398c3efe2247f07e37fc67e5b6888b0ff068e212b6b8e56077fa1f0f775e: Status 404 returned error can't find the container with id 7aa9398c3efe2247f07e37fc67e5b6888b0ff068e212b6b8e56077fa1f0f775e Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.352660 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnd4f\" (UniqueName: \"kubernetes.io/projected/e60eef75-abf0-4b05-94fc-430010bbe664-kube-api-access-jnd4f\") pod \"ingress-operator-5b745b69d9-zk295\" (UID: \"e60eef75-abf0-4b05-94fc-430010bbe664\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.358207 4810 request.go:700] Waited for 1.84346246s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/serviceaccounts/etcd-operator/token Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.374439 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vflq7\" (UniqueName: \"kubernetes.io/projected/8a7b72c6-af13-49cd-9b8d-3391a152cb7e-kube-api-access-vflq7\") pod \"etcd-operator-b45778765-m8wws\" (UID: \"8a7b72c6-af13-49cd-9b8d-3391a152cb7e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.396574 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89zh9\" 
(UniqueName: \"kubernetes.io/projected/48979d33-9725-4066-819c-9b1f8f2c62a6-kube-api-access-89zh9\") pod \"openshift-controller-manager-operator-756b6f6bc6-99nk7\" (UID: \"48979d33-9725-4066-819c-9b1f8f2c62a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.415609 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkjt7\" (UniqueName: \"kubernetes.io/projected/373c7218-dd5b-411d-bf82-94d13f4ca81a-kube-api-access-xkjt7\") pod \"route-controller-manager-6576b87f9c-dlmpd\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.427549 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438307 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-562v6\" (UniqueName: \"kubernetes.io/projected/fca02554-5b20-4ad8-b7a2-1172f7aa463c-kube-api-access-562v6\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438341 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/be6aaf2f-9432-4c1c-acf3-9e1a2ba26715-metrics-tls\") pod \"dns-operator-744455d44c-qhkvl\" (UID: \"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715\") " pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438362 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-trusted-ca-bundle\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438401 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438420 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t58fj\" (UniqueName: \"kubernetes.io/projected/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-kube-api-access-t58fj\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438439 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/554bceb2-197f-4ff9-98b8-166c1bbd51be-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") 
" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438457 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pntg6\" (UniqueName: \"kubernetes.io/projected/fe488bc4-f284-4b37-b4f5-ca9cfae32ed7-kube-api-access-pntg6\") pod \"downloads-7954f5f757-2cbhj\" (UID: \"fe488bc4-f284-4b37-b4f5-ca9cfae32ed7\") " pod="openshift-console/downloads-7954f5f757-2cbhj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438475 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5vvl\" (UniqueName: \"kubernetes.io/projected/74356fdb-1912-4ce1-8adb-90ab820418b4-kube-api-access-r5vvl\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438490 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgdmp\" (UniqueName: \"kubernetes.io/projected/d773e027-f95d-450b-bacc-f30b1235784c-kube-api-access-vgdmp\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438531 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-service-ca-bundle\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438547 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-console-config\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438562 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6cd6\" (UniqueName: \"kubernetes.io/projected/be6aaf2f-9432-4c1c-acf3-9e1a2ba26715-kube-api-access-z6cd6\") pod \"dns-operator-744455d44c-qhkvl\" (UID: \"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715\") " pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438584 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/73510d8a-e4fc-4187-bb00-e4d9435c8d33-ca-trust-extracted\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438600 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/996df972-3521-4a57-bb1e-bcca8f503fae-config\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438614 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-default-certificate\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438628 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-oauth-serving-cert\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438645 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57cng\" (UniqueName: \"kubernetes.io/projected/c50c167f-29c7-4a2a-a785-43c98ac454a2-kube-api-access-57cng\") pod \"cluster-samples-operator-665b6dd947-mm4xk\" (UID: \"c50c167f-29c7-4a2a-a785-43c98ac454a2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438673 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438690 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438706 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-machine-approver-tls\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438719 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-serving-cert\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438751 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d21b1057-e33d-4783-836e-afd16a18761d-profile-collector-cert\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: 
\"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438784 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fca02554-5b20-4ad8-b7a2-1172f7aa463c-service-ca-bundle\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438800 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/74356fdb-1912-4ce1-8adb-90ab820418b4-images\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438818 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k45mt\" (UniqueName: \"kubernetes.io/projected/554bceb2-197f-4ff9-98b8-166c1bbd51be-kube-api-access-k45mt\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438839 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438856 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80c40de0-8050-4343-80a0-8e6a80fe51e3-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438872 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74356fdb-1912-4ce1-8adb-90ab820418b4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438888 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hztqz\" (UniqueName: \"kubernetes.io/projected/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-kube-api-access-hztqz\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438902 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-trusted-ca\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438918 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.438937 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-config\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.439557 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:43.939543335 +0000 UTC m=+147.875004176 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.439752 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-serving-cert\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.439929 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440004 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z96s\" (UniqueName: \"kubernetes.io/projected/80c40de0-8050-4343-80a0-8e6a80fe51e3-kube-api-access-6z96s\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440076 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-certificates\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440107 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-stats-auth\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440450 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-bound-sa-token\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440576 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-service-ca\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440763 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4b82d59-537d-4f03-b7a5-3185a9007138-serving-cert\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440810 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c40de0-8050-4343-80a0-8e6a80fe51e3-config\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440944 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d21b1057-e33d-4783-836e-afd16a18761d-srv-cert\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: \"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440971 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/73510d8a-e4fc-4187-bb00-e4d9435c8d33-installation-pull-secrets\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.440994 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-metrics-certs\") pod 
\"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.441019 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmht8\" (UniqueName: \"kubernetes.io/projected/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-kube-api-access-nmht8\") pod \"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.441044 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.441129 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-auth-proxy-config\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.441164 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5f8gn\" (UniqueName: \"kubernetes.io/projected/d21b1057-e33d-4783-836e-afd16a18761d-kube-api-access-5f8gn\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: \"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.441206 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.441262 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/996df972-3521-4a57-bb1e-bcca8f503fae-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.441307 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442214 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-4lvdb\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-kube-api-access-4lvdb\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442257 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-config\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442296 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/554bceb2-197f-4ff9-98b8-166c1bbd51be-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442329 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-config\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442375 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c50c167f-29c7-4a2a-a785-43c98ac454a2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mm4xk\" (UID: \"c50c167f-29c7-4a2a-a785-43c98ac454a2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442423 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74356fdb-1912-4ce1-8adb-90ab820418b4-proxy-tls\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442454 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-oauth-config\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442488 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-tls\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442517 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bgxf\" 
(UniqueName: \"kubernetes.io/projected/c4b82d59-537d-4f03-b7a5-3185a9007138-kube-api-access-5bgxf\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442551 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/996df972-3521-4a57-bb1e-bcca8f503fae-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442616 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-config\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442647 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/554bceb2-197f-4ff9-98b8-166c1bbd51be-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442889 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-trusted-ca\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.442942 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.444018 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.446120 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.446578 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.446935 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.493829 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.495654 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-djbsd"] Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.514417 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p"] Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.517696 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.531240 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543064 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543373 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.543581 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.043558515 +0000 UTC m=+147.979019356 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543613 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-signing-cabundle\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543651 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-plugins-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543671 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad5d90c7-93e5-402d-b0fe-5df98f530899-apiservice-cert\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543686 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-csi-data-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543704 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t58fj\" (UniqueName: \"kubernetes.io/projected/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-kube-api-access-t58fj\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543720 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-secret-volume\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543754 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-socket-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543790 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-signing-key\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543839 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pntg6\" (UniqueName: \"kubernetes.io/projected/fe488bc4-f284-4b37-b4f5-ca9cfae32ed7-kube-api-access-pntg6\") pod \"downloads-7954f5f757-2cbhj\" (UID: \"fe488bc4-f284-4b37-b4f5-ca9cfae32ed7\") " pod="openshift-console/downloads-7954f5f757-2cbhj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543858 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5vvl\" (UniqueName: \"kubernetes.io/projected/74356fdb-1912-4ce1-8adb-90ab820418b4-kube-api-access-r5vvl\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543875 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwt6w\" (UniqueName: \"kubernetes.io/projected/d79446bb-f6e3-49a1-8322-39872afb23a0-kube-api-access-pwt6w\") pod \"ingress-canary-x9mg5\" (UID: \"d79446bb-f6e3-49a1-8322-39872afb23a0\") " pod="openshift-ingress-canary/ingress-canary-x9mg5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543892 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-service-ca-bundle\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543931 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-console-config\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543950 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-494ft\" (UniqueName: \"kubernetes.io/projected/1ce3a2f1-cdd9-4fb0-89a5-512215b1f657-kube-api-access-494ft\") pod \"package-server-manager-789f6589d5-wjfz7\" (UID: \"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543973 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/73510d8a-e4fc-4187-bb00-e4d9435c8d33-ca-trust-extracted\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.543993 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-default-certificate\") pod \"router-default-5444994796-zqmjf\" 
(UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544011 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57cng\" (UniqueName: \"kubernetes.io/projected/c50c167f-29c7-4a2a-a785-43c98ac454a2-kube-api-access-57cng\") pod \"cluster-samples-operator-665b6dd947-mm4xk\" (UID: \"c50c167f-29c7-4a2a-a785-43c98ac454a2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544038 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544055 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544073 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-serving-cert\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544092 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544109 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544142 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544171 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fca02554-5b20-4ad8-b7a2-1172f7aa463c-service-ca-bundle\") pod \"router-default-5444994796-zqmjf\" (UID: 
\"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544191 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80c40de0-8050-4343-80a0-8e6a80fe51e3-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544208 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74356fdb-1912-4ce1-8adb-90ab820418b4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544226 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drgcv\" (UniqueName: \"kubernetes.io/projected/ad5d90c7-93e5-402d-b0fe-5df98f530899-kube-api-access-drgcv\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544247 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544264 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm4cr\" (UniqueName: \"kubernetes.io/projected/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-kube-api-access-mm4cr\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544281 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5d38389-215a-4200-949f-f2204bdf2228-config\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544296 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b1b3285c-273c-43f1-a7ea-c32fce92ece9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jj69p\" (UID: \"b1b3285c-273c-43f1-a7ea-c32fce92ece9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544319 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nbzmx\" 
(UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544338 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-trusted-ca\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.544367 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tp4fv"] Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.545231 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/73510d8a-e4fc-4187-bb00-e4d9435c8d33-ca-trust-extracted\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.546286 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74356fdb-1912-4ce1-8adb-90ab820418b4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547013 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547043 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-config\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547082 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2w7x\" (UniqueName: \"kubernetes.io/projected/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-kube-api-access-f2w7x\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547103 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547151 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-serving-cert\") pod 
\"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547169 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-certs\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547187 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-certificates\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547231 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ad5d90c7-93e5-402d-b0fe-5df98f530899-tmpfs\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547259 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-service-ca\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547283 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c40de0-8050-4343-80a0-8e6a80fe51e3-config\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547301 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d79446bb-f6e3-49a1-8322-39872afb23a0-cert\") pod \"ingress-canary-x9mg5\" (UID: \"d79446bb-f6e3-49a1-8322-39872afb23a0\") " pod="openshift-ingress-canary/ingress-canary-x9mg5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547319 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/530c5657-3703-41d8-9b52-c5f0ec8ce941-profile-collector-cert\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547350 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4b82d59-537d-4f03-b7a5-3185a9007138-serving-cert\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547377 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/73510d8a-e4fc-4187-bb00-e4d9435c8d33-installation-pull-secrets\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547757 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-metrics-certs\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547778 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmht8\" (UniqueName: \"kubernetes.io/projected/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-kube-api-access-nmht8\") pod \"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547804 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08826aba-4e18-46a3-9685-a1d20cd2684e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547823 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5d38389-215a-4200-949f-f2204bdf2228-serving-cert\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547843 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e11bccd1-94c0-4366-9420-6295008b823d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wgxmb\" (UID: \"e11bccd1-94c0-4366-9420-6295008b823d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547871 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/996df972-3521-4a57-bb1e-bcca8f503fae-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547887 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-auth-proxy-config\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" 
Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547894 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fca02554-5b20-4ad8-b7a2-1172f7aa463c-service-ca-bundle\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547905 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69j65\" (UniqueName: \"kubernetes.io/projected/530c5657-3703-41d8-9b52-c5f0ec8ce941-kube-api-access-69j65\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547958 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547984 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad5d90c7-93e5-402d-b0fe-5df98f530899-webhook-cert\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548004 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548034 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lvdb\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-kube-api-access-4lvdb\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548058 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/554bceb2-197f-4ff9-98b8-166c1bbd51be-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548099 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-config\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548100 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548122 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c50c167f-29c7-4a2a-a785-43c98ac454a2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mm4xk\" (UID: \"c50c167f-29c7-4a2a-a785-43c98ac454a2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548149 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-tls\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548169 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bgxf\" (UniqueName: \"kubernetes.io/projected/c4b82d59-537d-4f03-b7a5-3185a9007138-kube-api-access-5bgxf\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548188 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74356fdb-1912-4ce1-8adb-90ab820418b4-proxy-tls\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548209 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-service-ca\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548223 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-metrics-tls\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548461 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c40de0-8050-4343-80a0-8e6a80fe51e3-config\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.548624 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-config\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.549039 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-trusted-ca\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.549062 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-config\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.547445 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-service-ca-bundle\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.550617 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.552379 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-default-certificate\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.553034 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80c40de0-8050-4343-80a0-8e6a80fe51e3-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.553411 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-certificates\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.553776 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-config\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 
05:43:43.553970 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-serving-cert\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.554681 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-console-config\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.554951 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-metrics-certs\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.555130 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/73510d8a-e4fc-4187-bb00-e4d9435c8d33-installation-pull-secrets\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.555658 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4b82d59-537d-4f03-b7a5-3185a9007138-serving-cert\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.555711 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-config\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.555752 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/554bceb2-197f-4ff9-98b8-166c1bbd51be-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.555778 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6dph\" (UniqueName: \"kubernetes.io/projected/2f676559-93f9-4af5-9079-2d1edaa8862f-kube-api-access-r6dph\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.556035 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-auth-proxy-config\") pod 
\"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.556336 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-serving-cert\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.556599 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-config\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.556783 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/530c5657-3703-41d8-9b52-c5f0ec8ce941-srv-cert\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.556817 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.556892 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-proxy-tls\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560160 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c50c167f-29c7-4a2a-a785-43c98ac454a2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mm4xk\" (UID: \"c50c167f-29c7-4a2a-a785-43c98ac454a2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560183 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08826aba-4e18-46a3-9685-a1d20cd2684e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.559897 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74356fdb-1912-4ce1-8adb-90ab820418b4-proxy-tls\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.559926 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/554bceb2-197f-4ff9-98b8-166c1bbd51be-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.557624 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-tls\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.559450 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/996df972-3521-4a57-bb1e-bcca8f503fae-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560292 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-trusted-ca\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560347 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pq46\" (UniqueName: \"kubernetes.io/projected/ab1ed666-8ebc-4549-af10-a2547bd44f9c-kube-api-access-7pq46\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560365 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-trusted-ca-bundle\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560400 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-562v6\" (UniqueName: \"kubernetes.io/projected/fca02554-5b20-4ad8-b7a2-1172f7aa463c-kube-api-access-562v6\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560417 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/be6aaf2f-9432-4c1c-acf3-9e1a2ba26715-metrics-tls\") pod \"dns-operator-744455d44c-qhkvl\" (UID: \"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715\") " pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560449 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560469 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-config-volume\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560501 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xf9p\" (UniqueName: \"kubernetes.io/projected/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-kube-api-access-2xf9p\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560522 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/554bceb2-197f-4ff9-98b8-166c1bbd51be-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560538 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-registration-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560555 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgdmp\" (UniqueName: \"kubernetes.io/projected/d773e027-f95d-450b-bacc-f30b1235784c-kube-api-access-vgdmp\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560577 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6cd6\" (UniqueName: \"kubernetes.io/projected/be6aaf2f-9432-4c1c-acf3-9e1a2ba26715-kube-api-access-z6cd6\") pod \"dns-operator-744455d44c-qhkvl\" (UID: \"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715\") " pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560602 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-node-bootstrap-token\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560621 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-oauth-serving-cert\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560638 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/08826aba-4e18-46a3-9685-a1d20cd2684e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560657 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/996df972-3521-4a57-bb1e-bcca8f503fae-config\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560689 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-machine-approver-tls\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560708 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d21b1057-e33d-4783-836e-afd16a18761d-profile-collector-cert\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: \"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560754 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/74356fdb-1912-4ce1-8adb-90ab820418b4-images\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560775 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k45mt\" (UniqueName: \"kubernetes.io/projected/554bceb2-197f-4ff9-98b8-166c1bbd51be-kube-api-access-k45mt\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560798 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-mountpoint-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560831 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hztqz\" (UniqueName: 
\"kubernetes.io/projected/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-kube-api-access-hztqz\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560848 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-dir\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560870 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560889 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5zct\" (UniqueName: \"kubernetes.io/projected/16fa1024-0d69-4a97-8b3f-172a6591c81a-kube-api-access-h5zct\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.560913 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z96s\" (UniqueName: \"kubernetes.io/projected/80c40de0-8050-4343-80a0-8e6a80fe51e3-kube-api-access-6z96s\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.561070 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nplkc\" (UniqueName: \"kubernetes.io/projected/c1c6390f-b265-4b4a-b2dc-2dd110b7ce95-kube-api-access-nplkc\") pod \"migrator-59844c95c7-szx7t\" (UID: \"c1c6390f-b265-4b4a-b2dc-2dd110b7ce95\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.561188 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-config-volume\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.561211 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-stats-auth\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.561231 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-bound-sa-token\") pod 
\"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.561262 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9f75\" (UniqueName: \"kubernetes.io/projected/e11bccd1-94c0-4366-9420-6295008b823d-kube-api-access-q9f75\") pod \"control-plane-machine-set-operator-78cbb6b69f-wgxmb\" (UID: \"e11bccd1-94c0-4366-9420-6295008b823d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.561302 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d21b1057-e33d-4783-836e-afd16a18761d-srv-cert\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: \"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.561324 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.561340 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5f8gn\" (UniqueName: \"kubernetes.io/projected/d21b1057-e33d-4783-836e-afd16a18761d-kube-api-access-5f8gn\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: \"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.561434 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.061416535 +0000 UTC m=+147.996877366 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563075 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-config\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563117 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lmf7\" (UniqueName: \"kubernetes.io/projected/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-kube-api-access-8lmf7\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563139 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563193 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563229 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-oauth-config\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563277 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-policies\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563296 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmb7c\" (UniqueName: \"kubernetes.io/projected/b1b3285c-273c-43f1-a7ea-c32fce92ece9-kube-api-access-hmb7c\") pod \"multus-admission-controller-857f4d67dd-jj69p\" (UID: \"b1b3285c-273c-43f1-a7ea-c32fce92ece9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" Dec 03 05:43:43 crc 
kubenswrapper[4810]: I1203 05:43:43.563319 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/996df972-3521-4a57-bb1e-bcca8f503fae-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563337 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1ce3a2f1-cdd9-4fb0-89a5-512215b1f657-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-wjfz7\" (UID: \"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563360 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9xm7\" (UniqueName: \"kubernetes.io/projected/f5d38389-215a-4200-949f-f2204bdf2228-kube-api-access-s9xm7\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563381 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwxsc\" (UniqueName: \"kubernetes.io/projected/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-kube-api-access-zwxsc\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563403 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563507 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.563542 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.564002 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/996df972-3521-4a57-bb1e-bcca8f503fae-config\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.564568 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/554bceb2-197f-4ff9-98b8-166c1bbd51be-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.565668 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/74356fdb-1912-4ce1-8adb-90ab820418b4-images\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.567540 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.567765 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.567884 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-machine-approver-tls\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.569129 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fca02554-5b20-4ad8-b7a2-1172f7aa463c-stats-auth\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.569155 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-oauth-serving-cert\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.569763 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b82d59-537d-4f03-b7a5-3185a9007138-config\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.570245 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-trusted-ca\") pod \"console-operator-58897d9998-b8ps5\" (UID: 
\"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.570657 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-oauth-config\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.570694 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/be6aaf2f-9432-4c1c-acf3-9e1a2ba26715-metrics-tls\") pod \"dns-operator-744455d44c-qhkvl\" (UID: \"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715\") " pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.571541 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-trusted-ca-bundle\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: W1203 05:43:43.576934 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda0b2ad7_dc0d_40ea_945a_ff2e54543eb5.slice/crio-c57dcbdc88908bbbb61748f392fdead486bbf59514c6ccac684ebdf7ba96675e WatchSource:0}: Error finding container c57dcbdc88908bbbb61748f392fdead486bbf59514c6ccac684ebdf7ba96675e: Status 404 returned error can't find the container with id c57dcbdc88908bbbb61748f392fdead486bbf59514c6ccac684ebdf7ba96675e Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.577555 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d21b1057-e33d-4783-836e-afd16a18761d-srv-cert\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: \"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.578465 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e6b1231-8db8-4bbe-8059-9a42cd390fd0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zfbsr\" (UID: \"0e6b1231-8db8-4bbe-8059-9a42cd390fd0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.579136 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d21b1057-e33d-4783-836e-afd16a18761d-profile-collector-cert\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: \"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.589482 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.589909 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.599761 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pntg6\" (UniqueName: \"kubernetes.io/projected/fe488bc4-f284-4b37-b4f5-ca9cfae32ed7-kube-api-access-pntg6\") pod \"downloads-7954f5f757-2cbhj\" (UID: \"fe488bc4-f284-4b37-b4f5-ca9cfae32ed7\") " pod="openshift-console/downloads-7954f5f757-2cbhj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.615234 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5vvl\" (UniqueName: \"kubernetes.io/projected/74356fdb-1912-4ce1-8adb-90ab820418b4-kube-api-access-r5vvl\") pod \"machine-config-operator-74547568cd-5j6v4\" (UID: \"74356fdb-1912-4ce1-8adb-90ab820418b4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.619043 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.641429 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57cng\" (UniqueName: \"kubernetes.io/projected/c50c167f-29c7-4a2a-a785-43c98ac454a2-kube-api-access-57cng\") pod \"cluster-samples-operator-665b6dd947-mm4xk\" (UID: \"c50c167f-29c7-4a2a-a785-43c98ac454a2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.660712 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd"] Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.665186 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.665467 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pq46\" (UniqueName: \"kubernetes.io/projected/ab1ed666-8ebc-4549-af10-a2547bd44f9c-kube-api-access-7pq46\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.665510 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-config-volume\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.665535 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xf9p\" (UniqueName: \"kubernetes.io/projected/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-kube-api-access-2xf9p\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.665557 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-registration-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.665653 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.16561591 +0000 UTC m=+148.101076751 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.665694 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-node-bootstrap-token\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.665885 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-registration-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666368 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/08826aba-4e18-46a3-9685-a1d20cd2684e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666413 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-mountpoint-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666436 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-dir\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666455 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-provider-selection\") pod 
\"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666475 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5zct\" (UniqueName: \"kubernetes.io/projected/16fa1024-0d69-4a97-8b3f-172a6591c81a-kube-api-access-h5zct\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666531 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nplkc\" (UniqueName: \"kubernetes.io/projected/c1c6390f-b265-4b4a-b2dc-2dd110b7ce95-kube-api-access-nplkc\") pod \"migrator-59844c95c7-szx7t\" (UID: \"c1c6390f-b265-4b4a-b2dc-2dd110b7ce95\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666549 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-config-volume\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666612 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9f75\" (UniqueName: \"kubernetes.io/projected/e11bccd1-94c0-4366-9420-6295008b823d-kube-api-access-q9f75\") pod \"control-plane-machine-set-operator-78cbb6b69f-wgxmb\" (UID: \"e11bccd1-94c0-4366-9420-6295008b823d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666672 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lmf7\" (UniqueName: \"kubernetes.io/projected/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-kube-api-access-8lmf7\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666694 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666718 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.666749 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-policies\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667012 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmb7c\" (UniqueName: \"kubernetes.io/projected/b1b3285c-273c-43f1-a7ea-c32fce92ece9-kube-api-access-hmb7c\") pod \"multus-admission-controller-857f4d67dd-jj69p\" (UID: \"b1b3285c-273c-43f1-a7ea-c32fce92ece9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667048 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1ce3a2f1-cdd9-4fb0-89a5-512215b1f657-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-wjfz7\" (UID: \"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667068 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwxsc\" (UniqueName: \"kubernetes.io/projected/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-kube-api-access-zwxsc\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667086 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667103 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9xm7\" (UniqueName: \"kubernetes.io/projected/f5d38389-215a-4200-949f-f2204bdf2228-kube-api-access-s9xm7\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667126 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667191 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667220 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-signing-cabundle\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:43 crc 
kubenswrapper[4810]: I1203 05:43:43.667246 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-plugins-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667270 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad5d90c7-93e5-402d-b0fe-5df98f530899-apiservice-cert\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667289 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-csi-data-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667341 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-secret-volume\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667402 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-socket-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667415 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-config-volume\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667422 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-signing-key\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.667500 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwt6w\" (UniqueName: \"kubernetes.io/projected/d79446bb-f6e3-49a1-8322-39872afb23a0-kube-api-access-pwt6w\") pod \"ingress-canary-x9mg5\" (UID: \"d79446bb-f6e3-49a1-8322-39872afb23a0\") " pod="openshift-ingress-canary/ingress-canary-x9mg5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670704 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-494ft\" (UniqueName: \"kubernetes.io/projected/1ce3a2f1-cdd9-4fb0-89a5-512215b1f657-kube-api-access-494ft\") pod \"package-server-manager-789f6589d5-wjfz7\" (UID: \"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670777 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670816 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670842 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670885 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drgcv\" (UniqueName: \"kubernetes.io/projected/ad5d90c7-93e5-402d-b0fe-5df98f530899-kube-api-access-drgcv\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670912 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm4cr\" (UniqueName: \"kubernetes.io/projected/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-kube-api-access-mm4cr\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670938 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5d38389-215a-4200-949f-f2204bdf2228-config\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670968 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b1b3285c-273c-43f1-a7ea-c32fce92ece9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jj69p\" (UID: \"b1b3285c-273c-43f1-a7ea-c32fce92ece9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.670996 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" 
Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671029 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2w7x\" (UniqueName: \"kubernetes.io/projected/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-kube-api-access-f2w7x\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671052 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671076 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-certs\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671102 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ad5d90c7-93e5-402d-b0fe-5df98f530899-tmpfs\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671141 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d79446bb-f6e3-49a1-8322-39872afb23a0-cert\") pod \"ingress-canary-x9mg5\" (UID: \"d79446bb-f6e3-49a1-8322-39872afb23a0\") " pod="openshift-ingress-canary/ingress-canary-x9mg5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671167 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/530c5657-3703-41d8-9b52-c5f0ec8ce941-profile-collector-cert\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671212 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5d38389-215a-4200-949f-f2204bdf2228-serving-cert\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671238 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08826aba-4e18-46a3-9685-a1d20cd2684e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671263 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69j65\" (UniqueName: 
\"kubernetes.io/projected/530c5657-3703-41d8-9b52-c5f0ec8ce941-kube-api-access-69j65\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671287 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e11bccd1-94c0-4366-9420-6295008b823d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wgxmb\" (UID: \"e11bccd1-94c0-4366-9420-6295008b823d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671314 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad5d90c7-93e5-402d-b0fe-5df98f530899-webhook-cert\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671335 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671357 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671424 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-metrics-tls\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671448 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6dph\" (UniqueName: \"kubernetes.io/projected/2f676559-93f9-4af5-9079-2d1edaa8862f-kube-api-access-r6dph\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671488 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/530c5657-3703-41d8-9b52-c5f0ec8ce941-srv-cert\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671511 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-router-certs\") pod 
\"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671534 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-proxy-tls\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.671558 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08826aba-4e18-46a3-9685-a1d20cd2684e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.672406 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08826aba-4e18-46a3-9685-a1d20cd2684e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.672770 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-signing-cabundle\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.672925 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-csi-data-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.675856 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-socket-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.675932 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.676115 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-mountpoint-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.676126 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ab1ed666-8ebc-4549-af10-a2547bd44f9c-plugins-dir\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.676153 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-dir\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.676706 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.677913 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.678232 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ad5d90c7-93e5-402d-b0fe-5df98f530899-tmpfs\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.678383 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-signing-key\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.680473 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.681959 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.681985 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5d38389-215a-4200-949f-f2204bdf2228-config\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.682278 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b1b3285c-273c-43f1-a7ea-c32fce92ece9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jj69p\" (UID: \"b1b3285c-273c-43f1-a7ea-c32fce92ece9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.682834 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad5d90c7-93e5-402d-b0fe-5df98f530899-apiservice-cert\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.683171 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.683932 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5d38389-215a-4200-949f-f2204bdf2228-serving-cert\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.684281 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.686810 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.686930 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t58fj\" (UniqueName: \"kubernetes.io/projected/7fe37a0b-aa47-497b-a3ce-6fd80ae79120-kube-api-access-t58fj\") pod \"machine-approver-56656f9798-xb9rx\" (UID: \"7fe37a0b-aa47-497b-a3ce-6fd80ae79120\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.688718 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1ce3a2f1-cdd9-4fb0-89a5-512215b1f657-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-wjfz7\" (UID: \"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 
05:43:43.689302 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-config-volume\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.689894 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-policies\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.690205 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad5d90c7-93e5-402d-b0fe-5df98f530899-webhook-cert\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.691112 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-certs\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.691117 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d79446bb-f6e3-49a1-8322-39872afb23a0-cert\") pod \"ingress-canary-x9mg5\" (UID: \"d79446bb-f6e3-49a1-8322-39872afb23a0\") " pod="openshift-ingress-canary/ingress-canary-x9mg5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.692253 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08826aba-4e18-46a3-9685-a1d20cd2684e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.692717 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/530c5657-3703-41d8-9b52-c5f0ec8ce941-srv-cert\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.693891 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e11bccd1-94c0-4366-9420-6295008b823d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wgxmb\" (UID: \"e11bccd1-94c0-4366-9420-6295008b823d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.694289 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.694455 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.695167 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.695449 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-metrics-tls\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.696170 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-secret-volume\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.697080 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-proxy-tls\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.698388 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/530c5657-3703-41d8-9b52-c5f0ec8ce941-profile-collector-cert\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.674706 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.706285 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-node-bootstrap-token\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.709383 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.709433 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.709676 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lvdb\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-kube-api-access-4lvdb\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.715211 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/554bceb2-197f-4ff9-98b8-166c1bbd51be-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.732763 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bgxf\" (UniqueName: \"kubernetes.io/projected/c4b82d59-537d-4f03-b7a5-3185a9007138-kube-api-access-5bgxf\") pod \"authentication-operator-69f744f599-bbzns\" (UID: \"c4b82d59-537d-4f03-b7a5-3185a9007138\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.756804 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmht8\" (UniqueName: \"kubernetes.io/projected/4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6-kube-api-access-nmht8\") pod \"kube-storage-version-migrator-operator-b67b599dd-g4xbz\" (UID: \"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.763955 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2cbhj" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.774925 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.775422 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.275395873 +0000 UTC m=+148.210856784 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.780490 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgdmp\" (UniqueName: \"kubernetes.io/projected/d773e027-f95d-450b-bacc-f30b1235784c-kube-api-access-vgdmp\") pod \"console-f9d7485db-fjrzc\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.796495 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-562v6\" (UniqueName: \"kubernetes.io/projected/fca02554-5b20-4ad8-b7a2-1172f7aa463c-kube-api-access-562v6\") pod \"router-default-5444994796-zqmjf\" (UID: \"fca02554-5b20-4ad8-b7a2-1172f7aa463c\") " pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.802428 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.812657 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.817489 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5f8gn\" (UniqueName: \"kubernetes.io/projected/d21b1057-e33d-4783-836e-afd16a18761d-kube-api-access-5f8gn\") pod \"catalog-operator-68c6474976-kf4q4\" (UID: \"d21b1057-e33d-4783-836e-afd16a18761d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.847263 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k45mt\" (UniqueName: \"kubernetes.io/projected/554bceb2-197f-4ff9-98b8-166c1bbd51be-kube-api-access-k45mt\") pod \"cluster-image-registry-operator-dc59b4c8b-fq8hz\" (UID: \"554bceb2-197f-4ff9-98b8-166c1bbd51be\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.848060 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.874117 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-bound-sa-token\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.877169 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 05:43:44.377139102 +0000 UTC m=+148.312599943 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.878771 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.879256 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.879888 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.379880269 +0000 UTC m=+148.315341110 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.880827 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.898275 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.899978 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6cd6\" (UniqueName: \"kubernetes.io/projected/be6aaf2f-9432-4c1c-acf3-9e1a2ba26715-kube-api-access-z6cd6\") pod \"dns-operator-744455d44c-qhkvl\" (UID: \"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715\") " pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.910649 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.917499 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/996df972-3521-4a57-bb1e-bcca8f503fae-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8vlvm\" (UID: \"996df972-3521-4a57-bb1e-bcca8f503fae\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.919682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hztqz\" (UniqueName: \"kubernetes.io/projected/294d2e90-9869-4e4c-85d9-6ca4cdd857c1-kube-api-access-hztqz\") pod \"console-operator-58897d9998-b8ps5\" (UID: \"294d2e90-9869-4e4c-85d9-6ca4cdd857c1\") " pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.923607 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7"] Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.942043 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z96s\" (UniqueName: \"kubernetes.io/projected/80c40de0-8050-4343-80a0-8e6a80fe51e3-kube-api-access-6z96s\") pod \"openshift-apiserver-operator-796bbdcf4f-27vrp\" (UID: \"80c40de0-8050-4343-80a0-8e6a80fe51e3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.947145 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.958720 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zk295"] Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.980335 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:43 crc kubenswrapper[4810]: E1203 05:43:43.980844 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.48082483 +0000 UTC m=+148.416285671 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:43 crc kubenswrapper[4810]: I1203 05:43:43.982222 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pq46\" (UniqueName: \"kubernetes.io/projected/ab1ed666-8ebc-4549-af10-a2547bd44f9c-kube-api-access-7pq46\") pod \"csi-hostpathplugin-rfxm8\" (UID: \"ab1ed666-8ebc-4549-af10-a2547bd44f9c\") " pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:43.995968 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m8wws"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.000517 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xf9p\" (UniqueName: \"kubernetes.io/projected/e8236d56-ff0b-44c0-88cf-97d63bf30a2c-kube-api-access-2xf9p\") pod \"machine-config-controller-84d6567774-9vqxr\" (UID: \"e8236d56-ff0b-44c0-88cf-97d63bf30a2c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.036618 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.043446 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwxsc\" (UniqueName: \"kubernetes.io/projected/0673ec3f-2f79-47c1-b23a-0cc3327f2bb8-kube-api-access-zwxsc\") pod \"dns-default-bplkj\" (UID: \"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8\") " pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.056484 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.060719 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5zct\" (UniqueName: \"kubernetes.io/projected/16fa1024-0d69-4a97-8b3f-172a6591c81a-kube-api-access-h5zct\") pod \"marketplace-operator-79b997595-22pk2\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.071344 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwt6w\" (UniqueName: \"kubernetes.io/projected/d79446bb-f6e3-49a1-8322-39872afb23a0-kube-api-access-pwt6w\") pod \"ingress-canary-x9mg5\" (UID: \"d79446bb-f6e3-49a1-8322-39872afb23a0\") " pod="openshift-ingress-canary/ingress-canary-x9mg5" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.073339 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.073380 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.075258 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.080112 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.082320 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.082668 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.582650362 +0000 UTC m=+148.518111193 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.091860 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-x9mg5" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.092076 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.092571 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.099043 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nplkc\" (UniqueName: \"kubernetes.io/projected/c1c6390f-b265-4b4a-b2dc-2dd110b7ce95-kube-api-access-nplkc\") pod \"migrator-59844c95c7-szx7t\" (UID: \"c1c6390f-b265-4b4a-b2dc-2dd110b7ce95\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.115591 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-494ft\" (UniqueName: \"kubernetes.io/projected/1ce3a2f1-cdd9-4fb0-89a5-512215b1f657-kube-api-access-494ft\") pod \"package-server-manager-789f6589d5-wjfz7\" (UID: \"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.116506 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9xm7\" (UniqueName: \"kubernetes.io/projected/f5d38389-215a-4200-949f-f2204bdf2228-kube-api-access-s9xm7\") pod \"service-ca-operator-777779d784-tqmtc\" (UID: \"f5d38389-215a-4200-949f-f2204bdf2228\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.137647 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9f75\" (UniqueName: \"kubernetes.io/projected/e11bccd1-94c0-4366-9420-6295008b823d-kube-api-access-q9f75\") pod \"control-plane-machine-set-operator-78cbb6b69f-wgxmb\" (UID: \"e11bccd1-94c0-4366-9420-6295008b823d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.158026 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lmf7\" (UniqueName: \"kubernetes.io/projected/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-kube-api-access-8lmf7\") pod \"collect-profiles-29412330-ltx4k\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.172611 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.173364 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/08826aba-4e18-46a3-9685-a1d20cd2684e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rr2wk\" (UID: \"08826aba-4e18-46a3-9685-a1d20cd2684e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.186888 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.187298 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.687275773 +0000 UTC m=+148.622736614 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.210853 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69j65\" (UniqueName: \"kubernetes.io/projected/530c5657-3703-41d8-9b52-c5f0ec8ce941-kube-api-access-69j65\") pod \"olm-operator-6b444d44fb-h9458\" (UID: \"530c5657-3703-41d8-9b52-c5f0ec8ce941\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.221538 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmb7c\" (UniqueName: \"kubernetes.io/projected/b1b3285c-273c-43f1-a7ea-c32fce92ece9-kube-api-access-hmb7c\") pod \"multus-admission-controller-857f4d67dd-jj69p\" (UID: \"b1b3285c-273c-43f1-a7ea-c32fce92ece9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" Dec 03 05:43:44 crc kubenswrapper[4810]: W1203 05:43:44.230458 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74356fdb_1912_4ce1_8adb_90ab820418b4.slice/crio-e8f2606cbfb775c31a7e18a785d71a83e796660b96662c71e11aff3da055ec3e WatchSource:0}: Error finding container e8f2606cbfb775c31a7e18a785d71a83e796660b96662c71e11aff3da055ec3e: Status 404 returned error can't find the container with id e8f2606cbfb775c31a7e18a785d71a83e796660b96662c71e11aff3da055ec3e Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.243434 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6dph\" (UniqueName: \"kubernetes.io/projected/2f676559-93f9-4af5-9079-2d1edaa8862f-kube-api-access-r6dph\") pod \"oauth-openshift-558db77b4-nbzmx\" (UID: 
\"2f676559-93f9-4af5-9079-2d1edaa8862f\") " pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.261168 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.266395 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.273230 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2w7x\" (UniqueName: \"kubernetes.io/projected/32ec7edc-8805-46d2-ac93-ecd76d6dc57a-kube-api-access-f2w7x\") pod \"service-ca-9c57cc56f-g7l7x\" (UID: \"32ec7edc-8805-46d2-ac93-ecd76d6dc57a\") " pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.273627 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.285926 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.288663 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.289267 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.289688 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.789671365 +0000 UTC m=+148.725132206 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.293137 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm4cr\" (UniqueName: \"kubernetes.io/projected/04d81c76-1eb6-4a9c-a689-9cfd89ff103c-kube-api-access-mm4cr\") pod \"machine-config-server-6bbnd\" (UID: \"04d81c76-1eb6-4a9c-a689-9cfd89ff103c\") " pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.306961 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.312971 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.316571 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drgcv\" (UniqueName: \"kubernetes.io/projected/ad5d90c7-93e5-402d-b0fe-5df98f530899-kube-api-access-drgcv\") pod \"packageserver-d55dfcdfc-rrbcq\" (UID: \"ad5d90c7-93e5-402d-b0fe-5df98f530899\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.317156 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zqmjf" event={"ID":"fca02554-5b20-4ad8-b7a2-1172f7aa463c","Type":"ContainerStarted","Data":"dbc5cbbddf8d963c61f27d10bb9f7b68c32993800c41ebdf0bf85ee55112311d"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.319287 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.325004 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" event={"ID":"373c7218-dd5b-411d-bf82-94d13f4ca81a","Type":"ContainerStarted","Data":"0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.325051 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" event={"ID":"373c7218-dd5b-411d-bf82-94d13f4ca81a","Type":"ContainerStarted","Data":"3755d609aefd01d6f7d92e1d73d81795485846bc67b5eb326704e2b8202075ed"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.326188 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.326899 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.329823 4810 generic.go:334] "Generic (PLEG): container finished" podID="139d2f9e-37ad-4a2e-9061-99e7592e68b4" containerID="2161e10a3ad9d42db22a89c1afac43a64f3708c812ce6b95d9e4d08961e144d5" exitCode=0 Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.330744 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" event={"ID":"139d2f9e-37ad-4a2e-9061-99e7592e68b4","Type":"ContainerDied","Data":"2161e10a3ad9d42db22a89c1afac43a64f3708c812ce6b95d9e4d08961e144d5"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.330770 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" event={"ID":"139d2f9e-37ad-4a2e-9061-99e7592e68b4","Type":"ContainerStarted","Data":"e916e3d064ac90eae0eb8e476ef7348fd8ed3d89ae981cb145ca4b72fbe3380a"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.337308 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.338276 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" event={"ID":"74356fdb-1912-4ce1-8adb-90ab820418b4","Type":"ContainerStarted","Data":"e8f2606cbfb775c31a7e18a785d71a83e796660b96662c71e11aff3da055ec3e"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.339496 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" event={"ID":"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5","Type":"ContainerStarted","Data":"c57dcbdc88908bbbb61748f392fdead486bbf59514c6ccac684ebdf7ba96675e"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.340344 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" event={"ID":"c34a2d47-0bc4-4100-bd82-d2bf8e571129","Type":"ContainerStarted","Data":"526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.340364 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" event={"ID":"c34a2d47-0bc4-4100-bd82-d2bf8e571129","Type":"ContainerStarted","Data":"75ce807d7eb0e57a913560a8a9e8955867b5275af0f542692e5e7c7ad598010f"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.341030 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.343426 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" event={"ID":"48979d33-9725-4066-819c-9b1f8f2c62a6","Type":"ContainerStarted","Data":"a2c4d7216a90744cf0072e4b89605fe98a687a89fabdab627b33b49df369a1ec"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.343761 4810 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-djbsd container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.343808 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.343803 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" podUID="c34a2d47-0bc4-4100-bd82-d2bf8e571129" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.345413 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" event={"ID":"e60eef75-abf0-4b05-94fc-430010bbe664","Type":"ContainerStarted","Data":"c0d62bde71af403986d8407d4a8026a948e03b1adac07c631640d17120c9b9dc"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.350827 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" event={"ID":"fe9221b2-2ee0-4fa7-846f-f37559bf8631","Type":"ContainerStarted","Data":"ebcad19c027a91239eef3b39a01001a0b7e9578c98c6ae6d9290ae3d2569a337"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.350873 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" event={"ID":"fe9221b2-2ee0-4fa7-846f-f37559bf8631","Type":"ContainerStarted","Data":"6090b988ccddedcb1f3e29bc24f8445452a0095f84db4cc53757c8c0daec1541"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.350882 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" event={"ID":"fe9221b2-2ee0-4fa7-846f-f37559bf8631","Type":"ContainerStarted","Data":"7aa9398c3efe2247f07e37fc67e5b6888b0ff068e212b6b8e56077fa1f0f775e"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.352998 4810 generic.go:334] "Generic (PLEG): container finished" podID="4a618272-575b-49a0-983c-64ac1d55259a" containerID="f45d360abecdd2abadabcadcf024055b957fd04c729b5ddf674e6ec0f7e02c96" exitCode=0 Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.353072 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" event={"ID":"4a618272-575b-49a0-983c-64ac1d55259a","Type":"ContainerDied","Data":"f45d360abecdd2abadabcadcf024055b957fd04c729b5ddf674e6ec0f7e02c96"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.353389 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" event={"ID":"4a618272-575b-49a0-983c-64ac1d55259a","Type":"ContainerStarted","Data":"813073ab01c5c27927c0950b81fc93fe67feb7f7b59adcbaa66bd9b84ef4976d"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.356179 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" event={"ID":"7fe37a0b-aa47-497b-a3ce-6fd80ae79120","Type":"ContainerStarted","Data":"27b4fecf76ba1e8ef23ec730f21c8e99e5768ff93087a23953334c94ebe66bf5"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.358565 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"eab06a33f19a58e329196cacbef0bbe72948a32331a34de50a8b25e4193b3819"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.370362 4810 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" event={"ID":"8a7b72c6-af13-49cd-9b8d-3391a152cb7e","Type":"ContainerStarted","Data":"b2d6473e673e8625225bdc6c66496e07da1adef9dd86f69e8a1b7629aa47d9a4"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.375530 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.390197 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.390524 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.890490631 +0000 UTC m=+148.825951472 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.391207 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.393584 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.89355821 +0000 UTC m=+148.829019051 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.396407 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c34fbbd01b4ed5830d87a0b89f50290b9c84c74c44934a68ec973fce81d43018"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.396586 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" event={"ID":"0e6b1231-8db8-4bbe-8059-9a42cd390fd0","Type":"ContainerStarted","Data":"1c9df40a45ba01ef4a1ad19f102596d0c7f54ebb240572434779dd49e1eeed06"} Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.398075 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6bbnd" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.495392 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.497210 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:44.997172915 +0000 UTC m=+148.932633756 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.499558 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bbzns"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.500704 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2cbhj"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.592611 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-fjrzc"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.596069 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.597916 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.598480 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.605472 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.098392506 +0000 UTC m=+149.033853347 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.627984 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.667051 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-x9mg5"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.700644 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.701597 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.201537494 +0000 UTC m=+149.136998345 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.704376 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.704800 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.204783149 +0000 UTC m=+149.140243990 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.725854 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-qhkvl"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.732913 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz"] Dec 03 05:43:44 crc kubenswrapper[4810]: W1203 05:43:44.741588 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80c40de0_8050_4343_80a0_8e6a80fe51e3.slice/crio-a258fd17ce5343d9d4495cbb18e25b1acb543d7f3bd74882c14d76f487b69a2c WatchSource:0}: Error finding container a258fd17ce5343d9d4495cbb18e25b1acb543d7f3bd74882c14d76f487b69a2c: Status 404 returned error can't find the container with id a258fd17ce5343d9d4495cbb18e25b1acb543d7f3bd74882c14d76f487b69a2c Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.745107 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.805366 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.805547 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 05:43:45.305513452 +0000 UTC m=+149.240974293 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.805696 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.806084 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.306066902 +0000 UTC m=+149.241527743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.888721 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.894771 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rfxm8"] Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.907266 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.907523 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.40749321 +0000 UTC m=+149.342954051 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:44 crc kubenswrapper[4810]: I1203 05:43:44.907696 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:44 crc kubenswrapper[4810]: E1203 05:43:44.908056 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.408047759 +0000 UTC m=+149.343508600 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.009589 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.009967 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.509949084 +0000 UTC m=+149.445409925 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.115622 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.116399 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.616386139 +0000 UTC m=+149.551846980 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.219569 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.219950 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.719928091 +0000 UTC m=+149.655388932 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.320822 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.321618 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.821599378 +0000 UTC m=+149.757060219 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.421524 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.422503 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:45.922478686 +0000 UTC m=+149.857939868 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.465104 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" event={"ID":"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715","Type":"ContainerStarted","Data":"46b42a3414a72c935efd41935cb765ae5dd63f6451a48d1c4663b30f3191fd75"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.476241 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" event={"ID":"996df972-3521-4a57-bb1e-bcca8f503fae","Type":"ContainerStarted","Data":"63c05842aae1fc7755246ed4cb70cd4222b67cf2f6990fdb9ce6ecd9faa76b40"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.495448 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"424a7686f8102016165c79a024ff2b258949b7e6ff3ab3312665f0e36c4321c6"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.498581 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz"] Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.507858 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" podStartSLOduration=129.507841948 podStartE2EDuration="2m9.507841948s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:45.505750784 +0000 UTC m=+149.441211625" watchObservedRunningTime="2025-12-03 05:43:45.507841948 +0000 UTC m=+149.443302789" Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.512177 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b8ps5"] Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.516235 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4"] Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.522704 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-bplkj"] Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.523284 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.523618 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-03 05:43:46.023604994 +0000 UTC m=+149.959065835 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.532258 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fvwpx" podStartSLOduration=129.532233988 podStartE2EDuration="2m9.532233988s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:45.5300091 +0000 UTC m=+149.465469951" watchObservedRunningTime="2025-12-03 05:43:45.532233988 +0000 UTC m=+149.467694829" Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.576605 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2cbhj" event={"ID":"fe488bc4-f284-4b37-b4f5-ca9cfae32ed7","Type":"ContainerStarted","Data":"b91772c74968d248f94cfbdb15959635bea6f5a79089ef5164d1891fcc9e5a21"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.611998 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" event={"ID":"c4b82d59-537d-4f03-b7a5-3185a9007138","Type":"ContainerStarted","Data":"16fdf4997330f7ec663338ecb9cb7096a937252ab724663e7d25ba43277d0e10"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.624433 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.624853 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.124833125 +0000 UTC m=+150.060293966 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.687156 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0531ea99b2e76842370376aa13acbd085a15888472e746c770d7eb0b945d4af2"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.695477 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" podStartSLOduration=129.695440006 podStartE2EDuration="2m9.695440006s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:45.694835384 +0000 UTC m=+149.630296225" watchObservedRunningTime="2025-12-03 05:43:45.695440006 +0000 UTC m=+149.630900847" Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.714072 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" event={"ID":"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6","Type":"ContainerStarted","Data":"3e4401b593a2076c51300ea28db35a605017d6d10cb5626027ec5660276fddc1"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.725833 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fjrzc" event={"ID":"d773e027-f95d-450b-bacc-f30b1235784c","Type":"ContainerStarted","Data":"274e808343343184b3732b92c4ec03ba4d5149c477cdbf39026d1b136ed832db"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.726325 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.726995 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.226982848 +0000 UTC m=+150.162443689 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.755585 4810 generic.go:334] "Generic (PLEG): container finished" podID="da0b2ad7-dc0d-40ea-945a-ff2e54543eb5" containerID="7dccd499fe5c2c25535910b1538ace91416ca18d4d690a1b1d56166bc1cbdf58" exitCode=0 Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.755687 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" event={"ID":"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5","Type":"ContainerDied","Data":"7dccd499fe5c2c25535910b1538ace91416ca18d4d690a1b1d56166bc1cbdf58"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.773607 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zqmjf" event={"ID":"fca02554-5b20-4ad8-b7a2-1172f7aa463c","Type":"ContainerStarted","Data":"fd945deff59259f795f19299aa5bd168d48fe788cb9acb768407ae1974c9ca04"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.778563 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-x9mg5" event={"ID":"d79446bb-f6e3-49a1-8322-39872afb23a0","Type":"ContainerStarted","Data":"bf8c81fed2b73e78eb1ad47f8e5fe2a68165140248753ad087b7d9e7a1655579"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.797520 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" event={"ID":"80c40de0-8050-4343-80a0-8e6a80fe51e3","Type":"ContainerStarted","Data":"a258fd17ce5343d9d4495cbb18e25b1acb543d7f3bd74882c14d76f487b69a2c"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.809295 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" event={"ID":"ab1ed666-8ebc-4549-af10-a2547bd44f9c","Type":"ContainerStarted","Data":"dfc20a7274ebd70bf74d62962b2c3e5d4dd7d86c8e87916e5751f31a1ac93f1e"} Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.819275 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.829536 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.830859 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.330839321 +0000 UTC m=+150.266300162 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.898969 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:45 crc kubenswrapper[4810]: I1203 05:43:45.932663 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:45 crc kubenswrapper[4810]: E1203 05:43:45.936942 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.436924793 +0000 UTC m=+150.372385634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.035173 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.035610 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.535587154 +0000 UTC m=+150.471047995 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.113898 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" podStartSLOduration=130.113877736 podStartE2EDuration="2m10.113877736s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:46.111606116 +0000 UTC m=+150.047066957" watchObservedRunningTime="2025-12-03 05:43:46.113877736 +0000 UTC m=+150.049338577" Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.145459 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.145827 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.645807262 +0000 UTC m=+150.581268103 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.217353 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-zqmjf" podStartSLOduration=130.217330865 podStartE2EDuration="2m10.217330865s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:46.216183055 +0000 UTC m=+150.151643896" watchObservedRunningTime="2025-12-03 05:43:46.217330865 +0000 UTC m=+150.152791706" Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.248325 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.248813 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.748791325 +0000 UTC m=+150.684252166 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.290117 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:46 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:46 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:46 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.290524 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.352001 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.352386 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.852371159 +0000 UTC m=+150.787832000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.473037 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.473510 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:46.973490552 +0000 UTC m=+150.908951393 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.499369 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.516718 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.530240 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.574812 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.575178 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.075164818 +0000 UTC m=+151.010625659 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.679906 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.680342 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.180325038 +0000 UTC m=+151.115785899 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.722245 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-22pk2"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.781394 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.781996 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.281973474 +0000 UTC m=+151.217434325 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.834475 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.850312 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" event={"ID":"0e6b1231-8db8-4bbe-8059-9a42cd390fd0","Type":"ContainerStarted","Data":"46980068ab227e6fdb782d23effa0cc5c62fe6d0e7be69f5eb41183ad16d7b90"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.861274 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" event={"ID":"139d2f9e-37ad-4a2e-9061-99e7592e68b4","Type":"ContainerStarted","Data":"0d92e742d64d5fa5d62da0a51709b8f31f8f504fc27ed5d028001c0fb4a46e59"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.863348 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.864828 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.866993 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.874497 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-99nk7" event={"ID":"48979d33-9725-4066-819c-9b1f8f2c62a6","Type":"ContainerStarted","Data":"0e96eceef6452a6b5d4a17b837df5834243994e1bf08b153f83dfafe29f36af2"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.878677 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jj69p"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.882278 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zfbsr" podStartSLOduration=130.882253911 podStartE2EDuration="2m10.882253911s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:46.872691404 +0000 UTC m=+150.808152245" watchObservedRunningTime="2025-12-03 05:43:46.882253911 +0000 UTC m=+150.817714752" Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.884042 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.884754 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.384708988 +0000 UTC m=+151.320169829 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.888496 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6bbnd" event={"ID":"04d81c76-1eb6-4a9c-a689-9cfd89ff103c","Type":"ContainerStarted","Data":"21eeff08eb439fc664dfdc8bed0e83f1edeb22c0e51df1b6875619320876767a"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.926366 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.935907 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:46 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:46 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:46 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.935977 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.940990 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" event={"ID":"c50c167f-29c7-4a2a-a785-43c98ac454a2","Type":"ContainerStarted","Data":"4263a1b26bb4bd11170803c348806d1802ffc1cd03bcd41de5f15e9f17a9b777"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.941250 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" podStartSLOduration=131.941218271 podStartE2EDuration="2m11.941218271s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:46.933957395 +0000 UTC m=+150.869418236" watchObservedRunningTime="2025-12-03 05:43:46.941218271 +0000 UTC m=+150.876679112" Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.942580 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.955383 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7f4287604b4426859748d85c6cb1be881144374791d37e941d2591f11ed804cf"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.956013 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:43:46 crc kubenswrapper[4810]: 
I1203 05:43:46.957609 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g7l7x"] Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.970902 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" event={"ID":"554bceb2-197f-4ff9-98b8-166c1bbd51be","Type":"ContainerStarted","Data":"7650cee0fb90092c37469a298bcbbe6bbd016b419926d4245ce194aa5c1191a6"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.972983 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b8ps5" event={"ID":"294d2e90-9869-4e4c-85d9-6ca4cdd857c1","Type":"ContainerStarted","Data":"261d276959ce736900aa602435652d95ea9d73b9ed5bc313847f0fcd2f881b00"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.980078 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bplkj" event={"ID":"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8","Type":"ContainerStarted","Data":"a5bdc7db0306534b8d443a6ec2486224cace8a2763d4db3c9af642ad6496770c"} Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.992147 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.992195 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" event={"ID":"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657","Type":"ContainerStarted","Data":"5c1628b4448418e5f03ab7febc4f58e93ac19fb9e47ae2fb99f158885f3fb6cf"} Dec 03 05:43:46 crc kubenswrapper[4810]: E1203 05:43:46.992459 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.492446359 +0000 UTC m=+151.427907190 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:46 crc kubenswrapper[4810]: I1203 05:43:46.994854 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" event={"ID":"7fe37a0b-aa47-497b-a3ce-6fd80ae79120","Type":"ContainerStarted","Data":"5d151b9417aba7608ac42e8dcb89151f35df06e2a0f9358b3ca9998028c9e238"} Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.002917 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" event={"ID":"4a618272-575b-49a0-983c-64ac1d55259a","Type":"ContainerStarted","Data":"0bd2468d7a6f60ed5f3955b21ea6bf341fa1c73186cb6bd6e4109d5fa80fe0d9"} Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.003894 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" event={"ID":"d21b1057-e33d-4783-836e-afd16a18761d","Type":"ContainerStarted","Data":"26dd8f6cd9a42bba45ffeded2f52ee127014786027b69c77cfaa8176f1877f29"} Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.004789 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" event={"ID":"530c5657-3703-41d8-9b52-c5f0ec8ce941","Type":"ContainerStarted","Data":"f31d1ab47dac9ed0c478c397257e0bd787b0674d4209dc59541c59e2c04a95fe"} Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.024980 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fjrzc" event={"ID":"d773e027-f95d-450b-bacc-f30b1235784c","Type":"ContainerStarted","Data":"b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05"} Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.052095 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" event={"ID":"8a7b72c6-af13-49cd-9b8d-3391a152cb7e","Type":"ContainerStarted","Data":"2b0d682320fe45740b6e37f38bc657660296a2ff59a753f8023213b97b7d39c2"} Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.072423 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" podStartSLOduration=131.072401379 podStartE2EDuration="2m11.072401379s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:47.071909452 +0000 UTC m=+151.007370283" watchObservedRunningTime="2025-12-03 05:43:47.072401379 +0000 UTC m=+151.007862210" Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.086324 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"580e8493abd5b037f35be0cc2690fc881a579813eab505a9e7c38677fac81633"} Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.086466 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-authentication/oauth-openshift-558db77b4-nbzmx"] Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.096990 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.097325 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.597306598 +0000 UTC m=+151.532767439 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.097469 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.098563 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.598541211 +0000 UTC m=+151.534002122 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.100969 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k"] Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.140316 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" event={"ID":"e60eef75-abf0-4b05-94fc-430010bbe664","Type":"ContainerStarted","Data":"5e534bf12889276d0747cad073190531b518a2692679071bc15a1258b0174dc6"} Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.169177 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-m8wws" podStartSLOduration=131.169159882 podStartE2EDuration="2m11.169159882s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:47.119578723 +0000 UTC m=+151.055039564" watchObservedRunningTime="2025-12-03 05:43:47.169159882 +0000 UTC m=+151.104620723" Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.199462 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.199822 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.699789243 +0000 UTC m=+151.635250094 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.203468 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.208329 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-fjrzc" podStartSLOduration=132.208310784 podStartE2EDuration="2m12.208310784s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:47.169246975 +0000 UTC m=+151.104707816" watchObservedRunningTime="2025-12-03 05:43:47.208310784 +0000 UTC m=+151.143771635" Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.209342 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.709323089 +0000 UTC m=+151.644783930 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.308042 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.308420 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.808401404 +0000 UTC m=+151.743862245 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.411426 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.411752 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:47.911724829 +0000 UTC m=+151.847185660 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.512455 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.513330 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.013308113 +0000 UTC m=+151.948768954 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.614858 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.615228 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.115214168 +0000 UTC m=+152.050674999 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.736291 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.737112 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.237093097 +0000 UTC m=+152.172553938 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.838370 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.838692 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.338679211 +0000 UTC m=+152.274140052 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.911913 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:47 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:47 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:47 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.913413 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:47 crc kubenswrapper[4810]: I1203 05:43:47.952772 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:47 crc kubenswrapper[4810]: E1203 05:43:47.953243 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.453216831 +0000 UTC m=+152.388677672 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.059316 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.059629 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.559615715 +0000 UTC m=+152.495076556 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.083840 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.087451 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.117238 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.167083 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.167289 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.667254882 +0000 UTC m=+152.602715733 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.167414 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.167999 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.667987998 +0000 UTC m=+152.603448839 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.181331 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" event={"ID":"08826aba-4e18-46a3-9685-a1d20cd2684e","Type":"ContainerStarted","Data":"ae1a82cc42389e949cc6e62e23a805701adeac8798267f6d89e7eb977690d8a8"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.191794 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" event={"ID":"80c40de0-8050-4343-80a0-8e6a80fe51e3","Type":"ContainerStarted","Data":"eee1024a571dd8d2d5095f63118c50c64767b0bdd7cc9a20ed2422eae16a970f"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.216448 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-27vrp" podStartSLOduration=133.216413766 podStartE2EDuration="2m13.216413766s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.213919758 +0000 UTC m=+152.149380599" watchObservedRunningTime="2025-12-03 05:43:48.216413766 +0000 UTC m=+152.151874597" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.217287 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" event={"ID":"d21b1057-e33d-4783-836e-afd16a18761d","Type":"ContainerStarted","Data":"d9e929baeb63996764ed3ce25d95e1a84f4b09562dd8ed83b183d95ae7e9c63a"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.217821 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.223969 4810 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-kf4q4 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.224039 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" podUID="d21b1057-e33d-4783-836e-afd16a18761d" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.234708 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" event={"ID":"554bceb2-197f-4ff9-98b8-166c1bbd51be","Type":"ContainerStarted","Data":"26173767661a309f370c258b8e7882ec89db734d9b5d78bc6f45beedf37f8ff2"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.237570 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" podStartSLOduration=132.237555992 podStartE2EDuration="2m12.237555992s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.236118201 +0000 UTC m=+152.171579042" watchObservedRunningTime="2025-12-03 05:43:48.237555992 +0000 UTC m=+152.173016833" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.257929 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-x9mg5" event={"ID":"d79446bb-f6e3-49a1-8322-39872afb23a0","Type":"ContainerStarted","Data":"3d84200da66a16c460449a018ff430d251cbbd5d3108f571f1e81688c7c09d7d"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.266104 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fq8hz" podStartSLOduration=132.266081588 podStartE2EDuration="2m12.266081588s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.263956443 +0000 UTC m=+152.199417284" watchObservedRunningTime="2025-12-03 05:43:48.266081588 +0000 UTC m=+152.201542429" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.268229 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" event={"ID":"c4b82d59-537d-4f03-b7a5-3185a9007138","Type":"ContainerStarted","Data":"131a535d86b7eca8723b53c994263ebf6f2ee067da24e120e135800a002c95e6"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.268748 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.269890 4810 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.769866652 +0000 UTC m=+152.705327503 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.274160 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" event={"ID":"530c5657-3703-41d8-9b52-c5f0ec8ce941","Type":"ContainerStarted","Data":"1ca0a96518ec7d8837fd7aed6aa67529fa6b35796a2a29a0397d49f201fc5c5a"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.274867 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.284319 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" event={"ID":"ab1ed666-8ebc-4549-af10-a2547bd44f9c","Type":"ContainerStarted","Data":"06debd9c555181825163492303b90b639fa2e6bb92112a173132f6e22d56ef60"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.288542 4810 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-h9458 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.288588 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" podUID="530c5657-3703-41d8-9b52-c5f0ec8ce941" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.290265 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-x9mg5" podStartSLOduration=7.29023454 podStartE2EDuration="7.29023454s" podCreationTimestamp="2025-12-03 05:43:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.290070464 +0000 UTC m=+152.225531305" watchObservedRunningTime="2025-12-03 05:43:48.29023454 +0000 UTC m=+152.225695371" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.304941 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" event={"ID":"f5d38389-215a-4200-949f-f2204bdf2228","Type":"ContainerStarted","Data":"81606dc32df14f163ee3c7dc9cad096f84d83e1f125daf8620661ab111726b6e"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.315419 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" 
event={"ID":"996df972-3521-4a57-bb1e-bcca8f503fae","Type":"ContainerStarted","Data":"3df0162789e04afb312e45ecac93f09a2f53d2354ea58e5f42d524da5e510d5d"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.318338 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" event={"ID":"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5","Type":"ContainerStarted","Data":"553574d65d393b5534882b52572aa6208f1cb4c0114f4a7f0b77dae877e82d0b"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.332791 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" podStartSLOduration=132.332766501 podStartE2EDuration="2m12.332766501s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.313847803 +0000 UTC m=+152.249308654" watchObservedRunningTime="2025-12-03 05:43:48.332766501 +0000 UTC m=+152.268227362" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.341825 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-bbzns" podStartSLOduration=133.341800259 podStartE2EDuration="2m13.341800259s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.341234639 +0000 UTC m=+152.276695490" watchObservedRunningTime="2025-12-03 05:43:48.341800259 +0000 UTC m=+152.277261100" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.344988 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" event={"ID":"c1c6390f-b265-4b4a-b2dc-2dd110b7ce95","Type":"ContainerStarted","Data":"b5cd26bc45e8b6f9ebb456ccb3a9371866173a5f8c23edde9730460928580afb"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.371351 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.375797 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.875782458 +0000 UTC m=+152.811243299 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.376148 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" podStartSLOduration=132.3761201 podStartE2EDuration="2m12.3761201s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.374942108 +0000 UTC m=+152.310402949" watchObservedRunningTime="2025-12-03 05:43:48.3761201 +0000 UTC m=+152.311580941" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.376704 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bplkj" event={"ID":"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8","Type":"ContainerStarted","Data":"d12b898c98cc62abb7ac59785470284e9bc0c33344e201828eac335fcc49ceeb"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.395124 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8vlvm" podStartSLOduration=132.39510152 podStartE2EDuration="2m12.39510152s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.39427734 +0000 UTC m=+152.329738171" watchObservedRunningTime="2025-12-03 05:43:48.39510152 +0000 UTC m=+152.330562361" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.406661 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" event={"ID":"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03","Type":"ContainerStarted","Data":"8fc8dba3ad266dc8ccc256813c60a7d3dd765296d224de2842871c78d19ab012"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.406716 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" event={"ID":"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03","Type":"ContainerStarted","Data":"8e3ace54d49149f44a91e559af4fe364f72fa7d219c867a8744a39ef77153e4a"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.428368 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2cbhj" event={"ID":"fe488bc4-f284-4b37-b4f5-ca9cfae32ed7","Type":"ContainerStarted","Data":"432a4f8b08451c18647e3e36ab9fed2436f7af908626a02a5c0d88c5fe494ad2"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.429320 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2cbhj" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.430829 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-2cbhj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 
05:43:48.430868 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2cbhj" podUID="fe488bc4-f284-4b37-b4f5-ca9cfae32ed7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.437159 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" event={"ID":"74356fdb-1912-4ce1-8adb-90ab820418b4","Type":"ContainerStarted","Data":"6094e1b2d842ccce5a9c26347a1235e59ae60680354cd32cab600ae5fa7b0448"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.454139 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" podStartSLOduration=132.454119421 podStartE2EDuration="2m12.454119421s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.453533951 +0000 UTC m=+152.388994792" watchObservedRunningTime="2025-12-03 05:43:48.454119421 +0000 UTC m=+152.389580262" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.461505 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" event={"ID":"32ec7edc-8805-46d2-ac93-ecd76d6dc57a","Type":"ContainerStarted","Data":"ba922a327c975cf2fb61467bf901e8c72fc745c12440739fd2aad4e792e28a96"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.473550 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.475394 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:48.975366061 +0000 UTC m=+152.910826902 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.493784 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-2cbhj" podStartSLOduration=133.493740949 podStartE2EDuration="2m13.493740949s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.482012455 +0000 UTC m=+152.417473296" watchObservedRunningTime="2025-12-03 05:43:48.493740949 +0000 UTC m=+152.429201790" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.501153 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" event={"ID":"e11bccd1-94c0-4366-9420-6295008b823d","Type":"ContainerStarted","Data":"572697744d2308c477c840cf713e23472dc4a32bec2df02fdf934f260d02fea9"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.504649 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" event={"ID":"2f676559-93f9-4af5-9079-2d1edaa8862f","Type":"ContainerStarted","Data":"d7a7ec13cf6d510bb5f25ae28a8ff6ff3fd3f71f0e7a88137154ee0971b16e7f"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.516923 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" podStartSLOduration=132.516893936 podStartE2EDuration="2m12.516893936s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.505032838 +0000 UTC m=+152.440493679" watchObservedRunningTime="2025-12-03 05:43:48.516893936 +0000 UTC m=+152.452354777" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.524181 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" event={"ID":"c50c167f-29c7-4a2a-a785-43c98ac454a2","Type":"ContainerStarted","Data":"0f01a8c7f405ac09f2918583ef4aaccc6d93300d55979fc359f35e1f4def0121"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.574889 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.576438 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.076425206 +0000 UTC m=+153.011886037 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.614197 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" event={"ID":"e60eef75-abf0-4b05-94fc-430010bbe664","Type":"ContainerStarted","Data":"cb655741c6df6358c98e5d0bd97123290c18f28696ae4af440198a97cab7f7d9"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.640129 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" podStartSLOduration=133.640108352 podStartE2EDuration="2m13.640108352s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.563290543 +0000 UTC m=+152.498751384" watchObservedRunningTime="2025-12-03 05:43:48.640108352 +0000 UTC m=+152.575569193" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.680693 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.680933 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.180891441 +0000 UTC m=+153.116352282 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.681340 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.682942 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.182915513 +0000 UTC m=+153.118376354 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.683269 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" event={"ID":"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657","Type":"ContainerStarted","Data":"5ccbc15d141afcca9d8620ad2a90f9df36656acd0f63d54022456612b7694964"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.767317 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" event={"ID":"7fe37a0b-aa47-497b-a3ce-6fd80ae79120","Type":"ContainerStarted","Data":"2646ac671b6e185d49b9f3e788e35003fa819187b20321d0dc9996147b8f3405"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.789672 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.789966 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.289946428 +0000 UTC m=+153.225407269 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.803291 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" event={"ID":"b1b3285c-273c-43f1-a7ea-c32fce92ece9","Type":"ContainerStarted","Data":"b1e382ca05d6d811fb5df3aafe596c6808d9eb345350af8b09df5fb012d96795"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.815303 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zk295" podStartSLOduration=132.815283582 podStartE2EDuration="2m12.815283582s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.650056854 +0000 UTC m=+152.585517695" watchObservedRunningTime="2025-12-03 05:43:48.815283582 +0000 UTC m=+152.750744423" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.817504 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xb9rx" podStartSLOduration=133.81749333 podStartE2EDuration="2m13.81749333s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.813916204 +0000 UTC m=+152.749377045" watchObservedRunningTime="2025-12-03 05:43:48.81749333 +0000 UTC m=+152.752954161" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.850837 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" event={"ID":"ad5d90c7-93e5-402d-b0fe-5df98f530899","Type":"ContainerStarted","Data":"46a57e3a23f7463dc5a7c12f2ad63556733099b4cf5965a62db80950c99a504b"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.852034 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.854426 4810 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-rrbcq container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" start-of-body= Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.854472 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" podUID="ad5d90c7-93e5-402d-b0fe-5df98f530899" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.876840 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b8ps5" 
event={"ID":"294d2e90-9869-4e4c-85d9-6ca4cdd857c1","Type":"ContainerStarted","Data":"f1ff675f487ef6a393ab9f9a0c308276a65d8f67609e216d50776709b1c2b88e"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.877491 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.878597 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" event={"ID":"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715","Type":"ContainerStarted","Data":"9ae95b22c64c04a5b60f63fd686503c4450f09b1dc3c4ae28acfa19a9cb925e1"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.879524 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6bbnd" event={"ID":"04d81c76-1eb6-4a9c-a689-9cfd89ff103c","Type":"ContainerStarted","Data":"ab67fbdc3792c79ed552f04e3c017083cede17805243a0c6283bf951c88d5914"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.892593 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:48 crc kubenswrapper[4810]: E1203 05:43:48.894443 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.394431524 +0000 UTC m=+153.329892365 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.896412 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" event={"ID":"4b6ac5e0-3f7c-4bd0-84af-3159adc3b0a6","Type":"ContainerStarted","Data":"88a4376428257de89ccee3b9e0ae660f38c5b0c7b3e3119bc741448ecd3e97a1"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.910904 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" podStartSLOduration=132.910881104 podStartE2EDuration="2m12.910881104s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.885675585 +0000 UTC m=+152.821136426" watchObservedRunningTime="2025-12-03 05:43:48.910881104 +0000 UTC m=+152.846341945" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.911037 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6bbnd" podStartSLOduration=7.91103418 podStartE2EDuration="7.91103418s" podCreationTimestamp="2025-12-03 05:43:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.910074166 +0000 UTC m=+152.845534997" watchObservedRunningTime="2025-12-03 05:43:48.91103418 +0000 UTC m=+152.846495021" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.914709 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" event={"ID":"16fa1024-0d69-4a97-8b3f-172a6591c81a","Type":"ContainerStarted","Data":"ea14a6044b5073a9ce04946750bf9ae58f057206e2ddfb8f0b135d0f35a2a678"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.914764 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" event={"ID":"16fa1024-0d69-4a97-8b3f-172a6591c81a","Type":"ContainerStarted","Data":"ed9a2cb814a3d5bcae1ecba385258b4f24ac1a0d0f601b99b065ee183a2da5df"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.915228 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.915351 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:48 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:48 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:48 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.915383 4810 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.952253 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-b8ps5" podStartSLOduration=133.952235023 podStartE2EDuration="2m13.952235023s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.950213252 +0000 UTC m=+152.885674093" watchObservedRunningTime="2025-12-03 05:43:48.952235023 +0000 UTC m=+152.887695864" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.954037 4810 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-22pk2 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.954116 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.984869 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" event={"ID":"e8236d56-ff0b-44c0-88cf-97d63bf30a2c","Type":"ContainerStarted","Data":"97f2631b310c4cb7aa820bc757d1338d20958413e6f3d34961644d8b329a87f2"} Dec 03 05:43:48 crc kubenswrapper[4810]: I1203 05:43:48.984908 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" event={"ID":"e8236d56-ff0b-44c0-88cf-97d63bf30a2c","Type":"ContainerStarted","Data":"6c3c7713255de2887c0c0d375e8697f6239c9303ff7330460f9b17e3920b83c7"} Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.004015 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.004199 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.504172696 +0000 UTC m=+153.439633537 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.004383 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.005379 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.505372198 +0000 UTC m=+153.440833039 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.020441 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" podStartSLOduration=133.020424889 podStartE2EDuration="2m13.020424889s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:48.979158633 +0000 UTC m=+152.914619474" watchObservedRunningTime="2025-12-03 05:43:49.020424889 +0000 UTC m=+152.955885730" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.024004 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kxwd5" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.053566 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-g4xbz" podStartSLOduration=133.053540197 podStartE2EDuration="2m13.053540197s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:49.022924007 +0000 UTC m=+152.958384848" watchObservedRunningTime="2025-12-03 05:43:49.053540197 +0000 UTC m=+152.989001038" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.053665 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" podStartSLOduration=133.053659631 podStartE2EDuration="2m13.053659631s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:49.047159172 +0000 UTC m=+152.982620013" watchObservedRunningTime="2025-12-03 05:43:49.053659631 +0000 UTC m=+152.989120472" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.106080 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.107905 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.607871104 +0000 UTC m=+153.543331945 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.207538 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.207929 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.707914823 +0000 UTC m=+153.643375664 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.308294 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.308622 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.808603585 +0000 UTC m=+153.744064416 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.330418 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q2w4p" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.345563 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-b8ps5" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.409514 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.409983 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:49.90996994 +0000 UTC m=+153.845430771 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.510611 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.010585389 +0000 UTC m=+153.946046230 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.510613 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.510952 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.511221 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.011214831 +0000 UTC m=+153.946675672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.611622 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.611767 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.111748438 +0000 UTC m=+154.047209279 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.611846 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.612172 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.112163212 +0000 UTC m=+154.047624053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.713079 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.713223 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.213187816 +0000 UTC m=+154.148648647 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.713779 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.714071 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.214057637 +0000 UTC m=+154.149518478 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.814489 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.814711 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.314674696 +0000 UTC m=+154.250135537 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.848170 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mlr8j"] Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.849062 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.852186 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.872045 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mlr8j"] Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.905511 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:49 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:49 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:49 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.905581 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.915679 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-catalog-content\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.915760 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.915791 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-utilities\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.915815 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f97lg\" (UniqueName: \"kubernetes.io/projected/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-kube-api-access-f97lg\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:49 crc kubenswrapper[4810]: E1203 05:43:49.916134 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.416120685 +0000 UTC m=+154.351581516 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.992533 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mm4xk" event={"ID":"c50c167f-29c7-4a2a-a785-43c98ac454a2","Type":"ContainerStarted","Data":"15d6c0c4b6721d0b7b94ab932438fe3017a4b6932fce46f50f020a85d06483b9"} Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.996637 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bplkj" event={"ID":"0673ec3f-2f79-47c1-b23a-0cc3327f2bb8","Type":"ContainerStarted","Data":"1c947a73b005b6f4d69cbe1fce343fae89847b71d54b34e163874cff7083d6d9"} Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.996705 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:49 crc kubenswrapper[4810]: I1203 05:43:49.998261 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9vqxr" event={"ID":"e8236d56-ff0b-44c0-88cf-97d63bf30a2c","Type":"ContainerStarted","Data":"80760add655b10ab6b8cff958271bae4f57d60f018e5e7f534220c98d951dc17"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.001126 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" event={"ID":"da0b2ad7-dc0d-40ea-945a-ff2e54543eb5","Type":"ContainerStarted","Data":"ad8b8174d9337af3b19a84d66c2088aff816de4c504ef2be84c633df39989dd6"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.003211 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" event={"ID":"be6aaf2f-9432-4c1c-acf3-9e1a2ba26715","Type":"ContainerStarted","Data":"49a770bc22b572cb9b8d7f448e3cd054e89656612aed740dcbd08db42a0308b7"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.004354 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" event={"ID":"2f676559-93f9-4af5-9079-2d1edaa8862f","Type":"ContainerStarted","Data":"2929e5132c69e48f307226eeba8936498e253f8b451b8f6e4bac51f40a12d71a"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.005083 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.007131 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" event={"ID":"ad5d90c7-93e5-402d-b0fe-5df98f530899","Type":"ContainerStarted","Data":"115d52c3baeed94f13d832e17ebc24740ca3759b3ce22a2fecf5b96f897067b3"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.009064 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" event={"ID":"08826aba-4e18-46a3-9685-a1d20cd2684e","Type":"ContainerStarted","Data":"fe8f57b783f6b4f7fdf79126a9ead521d0118709621afb60adddc4ebec6f59bc"} Dec 03 05:43:50 crc 
kubenswrapper[4810]: I1203 05:43:50.010185 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" event={"ID":"e11bccd1-94c0-4366-9420-6295008b823d","Type":"ContainerStarted","Data":"f225fba46dfa4dc23cc297a348b86bc7f6fd1dafa0e79337f74bfad4b25bacb4"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.011600 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" event={"ID":"1ce3a2f1-cdd9-4fb0-89a5-512215b1f657","Type":"ContainerStarted","Data":"d9e3ddb8334cc7b1f787fd00c3b554c358426771d5dd7911b14062311aea91d7"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.011943 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.012998 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" event={"ID":"c1c6390f-b265-4b4a-b2dc-2dd110b7ce95","Type":"ContainerStarted","Data":"8c2b4ec3a6bcd3516e13d1f5ad8aa2d4ce8e5e39462ed05dcadc08c4d5c33082"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.013019 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" event={"ID":"c1c6390f-b265-4b4a-b2dc-2dd110b7ce95","Type":"ContainerStarted","Data":"b2e33c29071f8a5944601b49409cc0855070ff5421df7117f485e0b9baa30997"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.014468 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" event={"ID":"ab1ed666-8ebc-4549-af10-a2547bd44f9c","Type":"ContainerStarted","Data":"ace7f56aa5bb0ebe2c37396a88f4d308c7a04bf93b5af9d103ff835db75df1a6"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.015661 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" event={"ID":"b1b3285c-273c-43f1-a7ea-c32fce92ece9","Type":"ContainerStarted","Data":"196f6281f6a7ab29200392a900b8a3c3a1f28c24be98c1c0a1231f4d351391e6"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.015683 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" event={"ID":"b1b3285c-273c-43f1-a7ea-c32fce92ece9","Type":"ContainerStarted","Data":"dcc82b442799883093d682b0a65807e38f254ed96775a2c7633576e062be743f"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.016268 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.016538 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f97lg\" (UniqueName: \"kubernetes.io/projected/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-kube-api-access-f97lg\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.016586 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-catalog-content\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.016643 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-utilities\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.017064 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-utilities\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:50 crc kubenswrapper[4810]: E1203 05:43:50.017137 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.517120678 +0000 UTC m=+154.452581509 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.017563 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-catalog-content\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.019084 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5j6v4" event={"ID":"74356fdb-1912-4ce1-8adb-90ab820418b4","Type":"ContainerStarted","Data":"ec6e89a0f1e5a984b2f5ae74407a396fe55a05cc88f8c72c2867d5b22775d75c"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.020796 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" event={"ID":"32ec7edc-8805-46d2-ac93-ecd76d6dc57a","Type":"ContainerStarted","Data":"9d3cbd45c3c3bb8c494a96658cd43aac0a8a8831aac9b5d7c2ade8874ed78171"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.024675 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-bplkj" podStartSLOduration=9.024663644 podStartE2EDuration="9.024663644s" podCreationTimestamp="2025-12-03 05:43:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.021357757 +0000 UTC m=+153.956818598" watchObservedRunningTime="2025-12-03 05:43:50.024663644 +0000 UTC m=+153.960124475" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.026814 4810 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tqmtc" event={"ID":"f5d38389-215a-4200-949f-f2204bdf2228","Type":"ContainerStarted","Data":"281d9a8e005b495c736879262c6ffdfd73cdfe234f7741f5e0535872b469d044"} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.045915 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-2cbhj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.046001 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2cbhj" podUID="fe488bc4-f284-4b37-b4f5-ca9cfae32ed7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.047048 4810 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-22pk2 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.047128 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.050697 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m697x"] Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.075383 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m697x"] Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.075468 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.075497 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kf4q4" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.075616 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.088376 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rr2wk" podStartSLOduration=134.088350561 podStartE2EDuration="2m14.088350561s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.076590366 +0000 UTC m=+154.012051207" watchObservedRunningTime="2025-12-03 05:43:50.088350561 +0000 UTC m=+154.023811402" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.100744 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f97lg\" (UniqueName: \"kubernetes.io/projected/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-kube-api-access-f97lg\") pod \"certified-operators-mlr8j\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.101478 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.118591 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hkxh\" (UniqueName: \"kubernetes.io/projected/4d0fa646-0a48-471d-9168-08716ab96d5e-kube-api-access-9hkxh\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.118777 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-catalog-content\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.118827 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.118893 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-utilities\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: E1203 05:43:50.127035 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.627014614 +0000 UTC m=+154.562475455 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.137079 4810 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.140230 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-g7l7x" podStartSLOduration=134.14021773 podStartE2EDuration="2m14.14021773s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.13821752 +0000 UTC m=+154.073678381" watchObservedRunningTime="2025-12-03 05:43:50.14021773 +0000 UTC m=+154.075678571" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.165014 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.217972 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wgxmb" podStartSLOduration=134.217953013 podStartE2EDuration="2m14.217953013s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.178977878 +0000 UTC m=+154.114438719" watchObservedRunningTime="2025-12-03 05:43:50.217953013 +0000 UTC m=+154.153413854" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.218090 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-szx7t" podStartSLOduration=134.218086407 podStartE2EDuration="2m14.218086407s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.212624525 +0000 UTC m=+154.148085366" watchObservedRunningTime="2025-12-03 05:43:50.218086407 +0000 UTC m=+154.153547258" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.221157 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.221453 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hkxh\" (UniqueName: \"kubernetes.io/projected/4d0fa646-0a48-471d-9168-08716ab96d5e-kube-api-access-9hkxh\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.221485 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-catalog-content\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.221513 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-utilities\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.221984 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-utilities\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: E1203 05:43:50.222052 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.722036367 +0000 UTC m=+154.657497208 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.222439 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-catalog-content\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.257147 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nzdh5"] Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.257327 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-jj69p" podStartSLOduration=134.2567169 podStartE2EDuration="2m14.2567169s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.255467836 +0000 UTC m=+154.190928677" watchObservedRunningTime="2025-12-03 05:43:50.2567169 +0000 UTC m=+154.192177741" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.258356 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.261018 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hkxh\" (UniqueName: \"kubernetes.io/projected/4d0fa646-0a48-471d-9168-08716ab96d5e-kube-api-access-9hkxh\") pod \"community-operators-m697x\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.294946 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nzdh5"] Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.314518 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" podStartSLOduration=135.314503629 podStartE2EDuration="2m15.314503629s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.312337862 +0000 UTC m=+154.247798713" watchObservedRunningTime="2025-12-03 05:43:50.314503629 +0000 UTC m=+154.249964470" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.323488 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4s57\" (UniqueName: \"kubernetes.io/projected/a3b0cacb-bba2-4966-9b2d-28a0410908ab-kube-api-access-j4s57\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.323544 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-catalog-content\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.323607 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.323628 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-utilities\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: E1203 05:43:50.323933 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.823921321 +0000 UTC m=+154.759382162 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.346304 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" podStartSLOduration=135.34628581 podStartE2EDuration="2m15.34628581s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.345320056 +0000 UTC m=+154.280780917" watchObservedRunningTime="2025-12-03 05:43:50.34628581 +0000 UTC m=+154.281746651" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.386199 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-qhkvl" podStartSLOduration=134.386176867 podStartE2EDuration="2m14.386176867s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.386057213 +0000 UTC m=+154.321518054" watchObservedRunningTime="2025-12-03 05:43:50.386176867 +0000 UTC m=+154.321637708" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.419229 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" podStartSLOduration=134.419213152 podStartE2EDuration="2m14.419213152s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:50.416187896 +0000 UTC m=+154.351648737" watchObservedRunningTime="2025-12-03 05:43:50.419213152 +0000 UTC m=+154.354673993" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.420034 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m697x" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.426915 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.427093 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-catalog-content\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.427177 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-utilities\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.427207 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4s57\" (UniqueName: \"kubernetes.io/projected/a3b0cacb-bba2-4966-9b2d-28a0410908ab-kube-api-access-j4s57\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: E1203 05:43:50.427616 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 05:43:50.927598228 +0000 UTC m=+154.863059069 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.428033 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-catalog-content\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.428323 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-utilities\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.460293 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.462296 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wfl78"] Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.463305 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.463828 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4s57\" (UniqueName: \"kubernetes.io/projected/a3b0cacb-bba2-4966-9b2d-28a0410908ab-kube-api-access-j4s57\") pod \"certified-operators-nzdh5\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.486773 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wfl78"] Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.533768 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-catalog-content\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.533808 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd897\" (UniqueName: \"kubernetes.io/projected/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-kube-api-access-xd897\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.533827 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-utilities\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " 
pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.533867 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:50 crc kubenswrapper[4810]: E1203 05:43:50.534170 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 05:43:51.034156107 +0000 UTC m=+154.969616948 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cqx4p" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.582393 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rrbcq" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.588848 4810 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-03T05:43:50.137446743Z","Handler":null,"Name":""} Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.601034 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.614478 4810 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.614514 4810 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.661110 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.661906 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-catalog-content\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.661943 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd897\" (UniqueName: \"kubernetes.io/projected/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-kube-api-access-xd897\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.661965 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-utilities\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.675420 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-utilities\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.677965 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-catalog-content\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.697979 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.700089 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd897\" (UniqueName: \"kubernetes.io/projected/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-kube-api-access-xd897\") pod \"community-operators-wfl78\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.763284 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.769911 4810 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.769965 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.818051 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.834091 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cqx4p\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.902904 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:50 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:50 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:50 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.902965 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:50 crc kubenswrapper[4810]: I1203 05:43:50.994308 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m697x"] Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.049120 4810 generic.go:334] "Generic (PLEG): container finished" podID="b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03" containerID="8fc8dba3ad266dc8ccc256813c60a7d3dd765296d224de2842871c78d19ab012" exitCode=0 Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.049269 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" event={"ID":"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03","Type":"ContainerDied","Data":"8fc8dba3ad266dc8ccc256813c60a7d3dd765296d224de2842871c78d19ab012"} Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.055649 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.056033 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m697x" event={"ID":"4d0fa646-0a48-471d-9168-08716ab96d5e","Type":"ContainerStarted","Data":"2d40d1a38b1725e020be29632ce2f64523058a4fd7842dc7531f49aa86315888"} Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.061933 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nzdh5"] Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.078004 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mlr8j"] Dec 03 05:43:51 crc kubenswrapper[4810]: W1203 05:43:51.099084 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c72e00b_cee9_4cee_bdd7_9f65dbb60c8d.slice/crio-ee6a5f5fd4ff3707e9a9c5c20fe8c4d6e9f836541b967967e50e41e35943bbdc WatchSource:0}: Error finding container ee6a5f5fd4ff3707e9a9c5c20fe8c4d6e9f836541b967967e50e41e35943bbdc: Status 404 returned error can't find the container with id ee6a5f5fd4ff3707e9a9c5c20fe8c4d6e9f836541b967967e50e41e35943bbdc Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.119273 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" event={"ID":"ab1ed666-8ebc-4549-af10-a2547bd44f9c","Type":"ContainerStarted","Data":"cdfed990c084f5119c0e231a74691de16811cdd410828658abd69e61791b06b6"} Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.119311 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" event={"ID":"ab1ed666-8ebc-4549-af10-a2547bd44f9c","Type":"ContainerStarted","Data":"8768d6d56e4e119ece1fdb023a5126c9495e03090c35de22b64d8a4c160be536"} Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.119478 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-2cbhj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.119507 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2cbhj" podUID="fe488bc4-f284-4b37-b4f5-ca9cfae32ed7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.157434 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-rfxm8" podStartSLOduration=10.157408413 podStartE2EDuration="10.157408413s" podCreationTimestamp="2025-12-03 05:43:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:51.145318997 +0000 UTC m=+155.080779838" watchObservedRunningTime="2025-12-03 05:43:51.157408413 +0000 UTC m=+155.092869264" Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.270125 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wfl78"] Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.501051 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/image-registry-697d97f7c8-cqx4p"] Dec 03 05:43:51 crc kubenswrapper[4810]: W1203 05:43:51.511638 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73510d8a_e4fc_4187_bb00_e4d9435c8d33.slice/crio-0d8eb676d0329e319172f7ede50385f2ebf216d9ffd75b056a1a6794345d32e4 WatchSource:0}: Error finding container 0d8eb676d0329e319172f7ede50385f2ebf216d9ffd75b056a1a6794345d32e4: Status 404 returned error can't find the container with id 0d8eb676d0329e319172f7ede50385f2ebf216d9ffd75b056a1a6794345d32e4 Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.837417 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jxjsv"] Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.851986 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.854548 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.856773 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxjsv"] Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.902285 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:51 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:51 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:51 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.902696 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.990140 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-utilities\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.990490 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72zg4\" (UniqueName: \"kubernetes.io/projected/f0b0d549-514c-4f29-901d-de91ae9e5242-kube-api-access-72zg4\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:51 crc kubenswrapper[4810]: I1203 05:43:51.990618 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-catalog-content\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.091536 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-72zg4\" (UniqueName: \"kubernetes.io/projected/f0b0d549-514c-4f29-901d-de91ae9e5242-kube-api-access-72zg4\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.092007 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-catalog-content\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.092135 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-utilities\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.092668 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-utilities\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.093350 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-catalog-content\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.116679 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72zg4\" (UniqueName: \"kubernetes.io/projected/f0b0d549-514c-4f29-901d-de91ae9e5242-kube-api-access-72zg4\") pod \"redhat-marketplace-jxjsv\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.126951 4810 generic.go:334] "Generic (PLEG): container finished" podID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerID="a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a" exitCode=0 Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.127325 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nzdh5" event={"ID":"a3b0cacb-bba2-4966-9b2d-28a0410908ab","Type":"ContainerDied","Data":"a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.127419 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nzdh5" event={"ID":"a3b0cacb-bba2-4966-9b2d-28a0410908ab","Type":"ContainerStarted","Data":"713798688aca7cad2a2021fefc9d74b5d7ef36876b0a57234dfbb956e4246c72"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.133846 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.134492 4810 generic.go:334] "Generic (PLEG): container finished" podID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerID="7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39" exitCode=0 Dec 03 05:43:52 crc 
kubenswrapper[4810]: I1203 05:43:52.134643 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m697x" event={"ID":"4d0fa646-0a48-471d-9168-08716ab96d5e","Type":"ContainerDied","Data":"7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.137125 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" event={"ID":"73510d8a-e4fc-4187-bb00-e4d9435c8d33","Type":"ContainerStarted","Data":"8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.137167 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" event={"ID":"73510d8a-e4fc-4187-bb00-e4d9435c8d33","Type":"ContainerStarted","Data":"0d8eb676d0329e319172f7ede50385f2ebf216d9ffd75b056a1a6794345d32e4"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.137785 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.145874 4810 generic.go:334] "Generic (PLEG): container finished" podID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerID="d57d115e9651ea29066bc78a229a5b009ba879113414a71f7825444ffb835470" exitCode=0 Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.146838 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wfl78" event={"ID":"ad9d333a-2c2a-4d28-8e28-821458b6e5f0","Type":"ContainerDied","Data":"d57d115e9651ea29066bc78a229a5b009ba879113414a71f7825444ffb835470"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.146874 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wfl78" event={"ID":"ad9d333a-2c2a-4d28-8e28-821458b6e5f0","Type":"ContainerStarted","Data":"382308d226adeb9f00c7241ba25899ed1a25e93db851899184ed79cd03d60f37"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.152672 4810 generic.go:334] "Generic (PLEG): container finished" podID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerID="b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869" exitCode=0 Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.153480 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mlr8j" event={"ID":"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d","Type":"ContainerDied","Data":"b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.153512 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mlr8j" event={"ID":"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d","Type":"ContainerStarted","Data":"ee6a5f5fd4ff3707e9a9c5c20fe8c4d6e9f836541b967967e50e41e35943bbdc"} Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.166298 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.207257 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" podStartSLOduration=136.207230238 podStartE2EDuration="2m16.207230238s" podCreationTimestamp="2025-12-03 05:41:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:52.171721115 +0000 UTC m=+156.107181966" watchObservedRunningTime="2025-12-03 05:43:52.207230238 +0000 UTC m=+156.142691079" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.240793 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gj4t7"] Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.242227 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.250561 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gj4t7"] Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.375873 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.387502 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.397557 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-utilities\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.397741 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcklw\" (UniqueName: \"kubernetes.io/projected/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-kube-api-access-qcklw\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.397813 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-catalog-content\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.415816 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxjsv"] Dec 03 05:43:52 crc kubenswrapper[4810]: W1203 05:43:52.423319 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0b0d549_514c_4f29_901d_de91ae9e5242.slice/crio-fc6f30374d440c3a24afc8ecd6013d11827e7aa74d802f005942c2404411b063 WatchSource:0}: Error finding container fc6f30374d440c3a24afc8ecd6013d11827e7aa74d802f005942c2404411b063: Status 404 returned error can't find the container 
with id fc6f30374d440c3a24afc8ecd6013d11827e7aa74d802f005942c2404411b063 Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.498634 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lmf7\" (UniqueName: \"kubernetes.io/projected/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-kube-api-access-8lmf7\") pod \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.498692 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-config-volume\") pod \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.499055 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-secret-volume\") pod \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\" (UID: \"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03\") " Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.499600 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-config-volume" (OuterVolumeSpecName: "config-volume") pod "b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03" (UID: "b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.499649 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-utilities\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.499929 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcklw\" (UniqueName: \"kubernetes.io/projected/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-kube-api-access-qcklw\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.499958 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-catalog-content\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.499996 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.500197 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-utilities\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.500579 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-catalog-content\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.503914 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03" (UID: "b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.504442 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-kube-api-access-8lmf7" (OuterVolumeSpecName: "kube-api-access-8lmf7") pod "b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03" (UID: "b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03"). InnerVolumeSpecName "kube-api-access-8lmf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.517868 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcklw\" (UniqueName: \"kubernetes.io/projected/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-kube-api-access-qcklw\") pod \"redhat-marketplace-gj4t7\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.585230 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.601111 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.601190 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lmf7\" (UniqueName: \"kubernetes.io/projected/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03-kube-api-access-8lmf7\") on node \"crc\" DevicePath \"\"" Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.790133 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gj4t7"] Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.903819 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:52 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:52 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:52 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:52 crc kubenswrapper[4810]: I1203 05:43:52.903942 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.035959 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kbbl7"] Dec 03 05:43:53 crc kubenswrapper[4810]: E1203 05:43:53.036222 4810 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03" containerName="collect-profiles" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.036234 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03" containerName="collect-profiles" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.036353 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03" containerName="collect-profiles" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.037285 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.039241 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.045711 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kbbl7"] Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.084510 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.086209 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.088568 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.088603 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.089455 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.171293 4810 generic.go:334] "Generic (PLEG): container finished" podID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerID="c87e35e8f74b677a21deb5af1b419076e6d41fc3fe419976d721947d32499e42" exitCode=0 Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.171355 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxjsv" event={"ID":"f0b0d549-514c-4f29-901d-de91ae9e5242","Type":"ContainerDied","Data":"c87e35e8f74b677a21deb5af1b419076e6d41fc3fe419976d721947d32499e42"} Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.171395 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxjsv" event={"ID":"f0b0d549-514c-4f29-901d-de91ae9e5242","Type":"ContainerStarted","Data":"fc6f30374d440c3a24afc8ecd6013d11827e7aa74d802f005942c2404411b063"} Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.174225 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" event={"ID":"b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03","Type":"ContainerDied","Data":"8e3ace54d49149f44a91e559af4fe364f72fa7d219c867a8744a39ef77153e4a"} Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.174259 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e3ace54d49149f44a91e559af4fe364f72fa7d219c867a8744a39ef77153e4a" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.174297 4810 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.178925 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gj4t7" event={"ID":"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2","Type":"ContainerStarted","Data":"f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918"} Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.178953 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gj4t7" event={"ID":"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2","Type":"ContainerStarted","Data":"557a219016ae0ac8c53f06063c0636c89db848975a29a49d4b13653851573231"} Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.207870 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.207993 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fgzx\" (UniqueName: \"kubernetes.io/projected/9edee8b2-6d3b-43ae-bee4-28739f3865bc-kube-api-access-9fgzx\") pod \"redhat-operators-kbbl7\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.208113 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-utilities\") pod \"redhat-operators-kbbl7\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.208209 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-catalog-content\") pod \"redhat-operators-kbbl7\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.208259 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.309291 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fgzx\" (UniqueName: \"kubernetes.io/projected/9edee8b2-6d3b-43ae-bee4-28739f3865bc-kube-api-access-9fgzx\") pod \"redhat-operators-kbbl7\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.309394 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-utilities\") pod \"redhat-operators-kbbl7\" (UID: 
\"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.309480 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-catalog-content\") pod \"redhat-operators-kbbl7\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.309521 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.309564 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.311230 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.311581 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-catalog-content\") pod \"redhat-operators-kbbl7\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.311610 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-utilities\") pod \"redhat-operators-kbbl7\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.329414 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fgzx\" (UniqueName: \"kubernetes.io/projected/9edee8b2-6d3b-43ae-bee4-28739f3865bc-kube-api-access-9fgzx\") pod \"redhat-operators-kbbl7\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.331412 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.335208 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.335262 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.341595 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.372045 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.406633 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.441328 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zkl7b"] Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.442774 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.447295 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zkl7b"] Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.512779 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-utilities\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.512840 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-catalog-content\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.512870 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fgzd\" (UniqueName: \"kubernetes.io/projected/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-kube-api-access-5fgzd\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.615505 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-utilities\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.615566 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-catalog-content\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.615608 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fgzd\" (UniqueName: \"kubernetes.io/projected/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-kube-api-access-5fgzd\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " 
pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.616668 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-utilities\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.616934 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-catalog-content\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.650594 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fgzd\" (UniqueName: \"kubernetes.io/projected/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-kube-api-access-5fgzd\") pod \"redhat-operators-zkl7b\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.717188 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.738886 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kbbl7"] Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.766144 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-2cbhj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.766203 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2cbhj" podUID="fe488bc4-f284-4b37-b4f5-ca9cfae32ed7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.766921 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-2cbhj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.766944 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2cbhj" podUID="fe488bc4-f284-4b37-b4f5-ca9cfae32ed7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.811782 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.814373 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.814416 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.818215 4810 patch_prober.go:28] interesting pod/console-f9d7485db-fjrzc container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.818264 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-fjrzc" podUID="d773e027-f95d-450b-bacc-f30b1235784c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.899082 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.903467 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:53 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:53 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:53 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:53 crc kubenswrapper[4810]: I1203 05:43:53.903516 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.082938 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zkl7b"] Dec 03 05:43:54 crc kubenswrapper[4810]: W1203 05:43:54.097462 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd2bc6c4_3ca3_49c8_b988_a1dd315d1427.slice/crio-6dadda76a05c87541467d120830059dbd89e06e8d17bef8d24882de62bf8eadf WatchSource:0}: Error finding container 6dadda76a05c87541467d120830059dbd89e06e8d17bef8d24882de62bf8eadf: Status 404 returned error can't find the container with id 6dadda76a05c87541467d120830059dbd89e06e8d17bef8d24882de62bf8eadf Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.203701 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zkl7b" event={"ID":"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427","Type":"ContainerStarted","Data":"6dadda76a05c87541467d120830059dbd89e06e8d17bef8d24882de62bf8eadf"} Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.208187 4810 generic.go:334] "Generic (PLEG): container finished" podID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerID="faae3be713b1dc6678429d6c8057a9a379b18321181f49b7aa496bf8bb18aa7b" exitCode=0 Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.208256 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-kbbl7" event={"ID":"9edee8b2-6d3b-43ae-bee4-28739f3865bc","Type":"ContainerDied","Data":"faae3be713b1dc6678429d6c8057a9a379b18321181f49b7aa496bf8bb18aa7b"} Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.208281 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbbl7" event={"ID":"9edee8b2-6d3b-43ae-bee4-28739f3865bc","Type":"ContainerStarted","Data":"586d9d6f86d370ef3de8841592ff70652f03c389d16003a7e4837046d047d9e7"} Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.219469 4810 generic.go:334] "Generic (PLEG): container finished" podID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerID="f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918" exitCode=0 Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.219666 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gj4t7" event={"ID":"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2","Type":"ContainerDied","Data":"f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918"} Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.224122 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7","Type":"ContainerStarted","Data":"4cb7ab3428a03de33cf67dd06b400a080e08b880c86502f9e7e4505421f73c70"} Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.228796 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-tp4fv" Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.253971 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.253843914 podStartE2EDuration="1.253843914s" podCreationTimestamp="2025-12-03 05:43:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:54.248240446 +0000 UTC m=+158.183701287" watchObservedRunningTime="2025-12-03 05:43:54.253843914 +0000 UTC m=+158.189304765" Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.277656 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.903324 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:54 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:54 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:54 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:54 crc kubenswrapper[4810]: I1203 05:43:54.903858 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.170185 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.172079 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.177371 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.185931 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.186760 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.210595 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ac08a08-d73b-461d-902b-75af697e01e0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8ac08a08-d73b-461d-902b-75af697e01e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.210652 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ac08a08-d73b-461d-902b-75af697e01e0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8ac08a08-d73b-461d-902b-75af697e01e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.277417 4810 generic.go:334] "Generic (PLEG): container finished" podID="a15a4d2a-63e7-4c98-a988-5f4f608f4aa7" containerID="1ff66ba4f470e8c70899324001bc2073deb442a60b315e1bf69585c794f7fad0" exitCode=0 Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.277581 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7","Type":"ContainerDied","Data":"1ff66ba4f470e8c70899324001bc2073deb442a60b315e1bf69585c794f7fad0"} Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.293871 4810 generic.go:334] "Generic (PLEG): container finished" podID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerID="07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2" exitCode=0 Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.294148 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zkl7b" event={"ID":"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427","Type":"ContainerDied","Data":"07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2"} Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.312507 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ac08a08-d73b-461d-902b-75af697e01e0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8ac08a08-d73b-461d-902b-75af697e01e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.312564 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ac08a08-d73b-461d-902b-75af697e01e0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8ac08a08-d73b-461d-902b-75af697e01e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.312666 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/8ac08a08-d73b-461d-902b-75af697e01e0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8ac08a08-d73b-461d-902b-75af697e01e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.355851 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ac08a08-d73b-461d-902b-75af697e01e0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8ac08a08-d73b-461d-902b-75af697e01e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.500321 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.678639 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.679148 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.901751 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:55 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:55 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:55 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:55 crc kubenswrapper[4810]: I1203 05:43:55.901805 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.164070 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.326364 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8ac08a08-d73b-461d-902b-75af697e01e0","Type":"ContainerStarted","Data":"99425a5aedeea0fc34b6bfd31c4e3bdf94bfdad68745b4a877670085c7a8a6df"} Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.772534 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.849294 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kubelet-dir\") pod \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\" (UID: \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\") " Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.849400 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kube-api-access\") pod \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\" (UID: \"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7\") " Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.849417 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a15a4d2a-63e7-4c98-a988-5f4f608f4aa7" (UID: "a15a4d2a-63e7-4c98-a988-5f4f608f4aa7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.849784 4810 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.864866 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a15a4d2a-63e7-4c98-a988-5f4f608f4aa7" (UID: "a15a4d2a-63e7-4c98-a988-5f4f608f4aa7"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.909076 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:56 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:56 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:56 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.909212 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:56 crc kubenswrapper[4810]: I1203 05:43:56.951559 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a15a4d2a-63e7-4c98-a988-5f4f608f4aa7-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 05:43:57 crc kubenswrapper[4810]: I1203 05:43:57.353241 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a15a4d2a-63e7-4c98-a988-5f4f608f4aa7","Type":"ContainerDied","Data":"4cb7ab3428a03de33cf67dd06b400a080e08b880c86502f9e7e4505421f73c70"} Dec 03 05:43:57 crc kubenswrapper[4810]: I1203 05:43:57.353293 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cb7ab3428a03de33cf67dd06b400a080e08b880c86502f9e7e4505421f73c70" Dec 03 05:43:57 crc kubenswrapper[4810]: I1203 05:43:57.353347 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 05:43:57 crc kubenswrapper[4810]: I1203 05:43:57.902404 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:57 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:57 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:57 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:57 crc kubenswrapper[4810]: I1203 05:43:57.902525 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:58 crc kubenswrapper[4810]: I1203 05:43:58.418449 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8ac08a08-d73b-461d-902b-75af697e01e0","Type":"ContainerStarted","Data":"942752b2ab832f8be77195dac712ad6f026a29b499ed711b1eb06928383c50e3"} Dec 03 05:43:58 crc kubenswrapper[4810]: I1203 05:43:58.427025 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.427002068 podStartE2EDuration="3.427002068s" podCreationTimestamp="2025-12-03 05:43:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:43:58.417119199 +0000 UTC m=+162.352580060" watchObservedRunningTime="2025-12-03 05:43:58.427002068 +0000 UTC m=+162.362462909" Dec 03 05:43:58 crc kubenswrapper[4810]: I1203 05:43:58.589192 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:58 crc kubenswrapper[4810]: I1203 05:43:58.599317 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7283fe50-3c8e-4b8b-90ac-80e0e2c9a746-metrics-certs\") pod \"network-metrics-daemon-zx6mp\" (UID: \"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746\") " pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:58 crc kubenswrapper[4810]: I1203 05:43:58.803974 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zx6mp" Dec 03 05:43:58 crc kubenswrapper[4810]: I1203 05:43:58.904131 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:58 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:58 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:58 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:58 crc kubenswrapper[4810]: I1203 05:43:58.904202 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:43:59 crc kubenswrapper[4810]: I1203 05:43:59.106079 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-bplkj" Dec 03 05:43:59 crc kubenswrapper[4810]: I1203 05:43:59.902626 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:43:59 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:43:59 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:43:59 crc kubenswrapper[4810]: healthz check failed Dec 03 05:43:59 crc kubenswrapper[4810]: I1203 05:43:59.903065 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:44:00 crc kubenswrapper[4810]: I1203 05:44:00.430543 4810 generic.go:334] "Generic (PLEG): container finished" podID="8ac08a08-d73b-461d-902b-75af697e01e0" containerID="942752b2ab832f8be77195dac712ad6f026a29b499ed711b1eb06928383c50e3" exitCode=0 Dec 03 05:44:00 crc kubenswrapper[4810]: I1203 05:44:00.430588 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8ac08a08-d73b-461d-902b-75af697e01e0","Type":"ContainerDied","Data":"942752b2ab832f8be77195dac712ad6f026a29b499ed711b1eb06928383c50e3"} Dec 03 05:44:00 crc kubenswrapper[4810]: I1203 05:44:00.903667 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:44:00 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:44:00 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:44:00 crc kubenswrapper[4810]: healthz check failed Dec 03 05:44:00 crc kubenswrapper[4810]: I1203 05:44:00.903843 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:44:01 crc kubenswrapper[4810]: I1203 05:44:01.902003 4810 patch_prober.go:28] interesting pod/router-default-5444994796-zqmjf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed 
with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 05:44:01 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Dec 03 05:44:01 crc kubenswrapper[4810]: [+]process-running ok Dec 03 05:44:01 crc kubenswrapper[4810]: healthz check failed Dec 03 05:44:01 crc kubenswrapper[4810]: I1203 05:44:01.902417 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zqmjf" podUID="fca02554-5b20-4ad8-b7a2-1172f7aa463c" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 05:44:02 crc kubenswrapper[4810]: I1203 05:44:02.902085 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:44:02 crc kubenswrapper[4810]: I1203 05:44:02.905135 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-zqmjf" Dec 03 05:44:03 crc kubenswrapper[4810]: I1203 05:44:03.799045 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-2cbhj" Dec 03 05:44:03 crc kubenswrapper[4810]: I1203 05:44:03.814676 4810 patch_prober.go:28] interesting pod/console-f9d7485db-fjrzc container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 03 05:44:03 crc kubenswrapper[4810]: I1203 05:44:03.814761 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-fjrzc" podUID="d773e027-f95d-450b-bacc-f30b1235784c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.102586 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.154121 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ac08a08-d73b-461d-902b-75af697e01e0-kubelet-dir\") pod \"8ac08a08-d73b-461d-902b-75af697e01e0\" (UID: \"8ac08a08-d73b-461d-902b-75af697e01e0\") " Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.154288 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ac08a08-d73b-461d-902b-75af697e01e0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8ac08a08-d73b-461d-902b-75af697e01e0" (UID: "8ac08a08-d73b-461d-902b-75af697e01e0"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.154362 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ac08a08-d73b-461d-902b-75af697e01e0-kube-api-access\") pod \"8ac08a08-d73b-461d-902b-75af697e01e0\" (UID: \"8ac08a08-d73b-461d-902b-75af697e01e0\") " Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.154659 4810 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ac08a08-d73b-461d-902b-75af697e01e0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.175930 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ac08a08-d73b-461d-902b-75af697e01e0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8ac08a08-d73b-461d-902b-75af697e01e0" (UID: "8ac08a08-d73b-461d-902b-75af697e01e0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.256435 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ac08a08-d73b-461d-902b-75af697e01e0-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.504093 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8ac08a08-d73b-461d-902b-75af697e01e0","Type":"ContainerDied","Data":"99425a5aedeea0fc34b6bfd31c4e3bdf94bfdad68745b4a877670085c7a8a6df"} Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.504145 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99425a5aedeea0fc34b6bfd31c4e3bdf94bfdad68745b4a877670085c7a8a6df" Dec 03 05:44:07 crc kubenswrapper[4810]: I1203 05:44:07.504209 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 05:44:11 crc kubenswrapper[4810]: I1203 05:44:11.061297 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:44:13 crc kubenswrapper[4810]: I1203 05:44:13.819445 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:44:13 crc kubenswrapper[4810]: I1203 05:44:13.825694 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:44:23 crc kubenswrapper[4810]: I1203 05:44:23.875291 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 05:44:24 crc kubenswrapper[4810]: I1203 05:44:24.325678 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wjfz7" Dec 03 05:44:25 crc kubenswrapper[4810]: I1203 05:44:25.677603 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:44:25 crc kubenswrapper[4810]: I1203 05:44:25.677708 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:44:28 crc kubenswrapper[4810]: E1203 05:44:28.893594 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 03 05:44:28 crc kubenswrapper[4810]: E1203 05:44:28.894319 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9fgzx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-kbbl7_openshift-marketplace(9edee8b2-6d3b-43ae-bee4-28739f3865bc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 05:44:28 crc kubenswrapper[4810]: E1203 05:44:28.895997 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-kbbl7" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" Dec 03 05:44:28 crc kubenswrapper[4810]: E1203 05:44:28.942980 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 03 05:44:28 crc kubenswrapper[4810]: E1203 05:44:28.943214 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5fgzd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-zkl7b_openshift-marketplace(dd2bc6c4-3ca3-49c8-b988-a1dd315d1427): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 05:44:28 crc kubenswrapper[4810]: E1203 05:44:28.944418 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-zkl7b" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.113029 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-zkl7b" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.113507 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-kbbl7" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.181445 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.181610 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qcklw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-gj4t7_openshift-marketplace(4f4a5cea-9a0e-4387-b894-7ac5d56dcde2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.182887 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-gj4t7" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.210124 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.210310 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-72zg4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-jxjsv_openshift-marketplace(f0b0d549-514c-4f29-901d-de91ae9e5242): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.213045 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-jxjsv" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.372363 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.372756 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ac08a08-d73b-461d-902b-75af697e01e0" containerName="pruner" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.372773 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ac08a08-d73b-461d-902b-75af697e01e0" containerName="pruner" Dec 03 05:44:30 crc kubenswrapper[4810]: E1203 05:44:30.372796 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15a4d2a-63e7-4c98-a988-5f4f608f4aa7" containerName="pruner" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.372804 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15a4d2a-63e7-4c98-a988-5f4f608f4aa7" containerName="pruner" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.372926 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ac08a08-d73b-461d-902b-75af697e01e0" containerName="pruner" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.372937 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a15a4d2a-63e7-4c98-a988-5f4f608f4aa7" containerName="pruner" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.373503 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.375962 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.377598 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.380205 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.490506 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.490631 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.591540 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.591650 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.591718 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.612801 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:30 crc kubenswrapper[4810]: I1203 05:44:30.704012 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:31 crc kubenswrapper[4810]: E1203 05:44:31.956099 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-jxjsv" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" Dec 03 05:44:31 crc kubenswrapper[4810]: E1203 05:44:31.956599 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-gj4t7" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" Dec 03 05:44:32 crc kubenswrapper[4810]: E1203 05:44:32.081850 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 05:44:32 crc kubenswrapper[4810]: E1203 05:44:32.082343 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j4s57,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-nzdh5_openshift-marketplace(a3b0cacb-bba2-4966-9b2d-28a0410908ab): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 05:44:32 crc kubenswrapper[4810]: E1203 05:44:32.084057 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-nzdh5" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" Dec 03 05:44:32 crc kubenswrapper[4810]: E1203 05:44:32.126852 
4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 05:44:32 crc kubenswrapper[4810]: E1203 05:44:32.127067 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f97lg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mlr8j_openshift-marketplace(3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 05:44:32 crc kubenswrapper[4810]: E1203 05:44:32.128687 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-mlr8j" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" Dec 03 05:44:32 crc kubenswrapper[4810]: I1203 05:44:32.399290 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 05:44:32 crc kubenswrapper[4810]: I1203 05:44:32.465368 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zx6mp"] Dec 03 05:44:32 crc kubenswrapper[4810]: I1203 05:44:32.651786 4810 generic.go:334] "Generic (PLEG): container finished" podID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerID="30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132" exitCode=0 Dec 03 05:44:32 crc kubenswrapper[4810]: I1203 05:44:32.651903 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m697x" event={"ID":"4d0fa646-0a48-471d-9168-08716ab96d5e","Type":"ContainerDied","Data":"30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132"} Dec 03 05:44:32 crc kubenswrapper[4810]: I1203 05:44:32.655457 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e541b8ff-cee1-4236-84fc-0b4d8a7ad103","Type":"ContainerStarted","Data":"5ff5f6864b12c2927f35224de210186bc727602d5ed11ab6ccc030b9471e6634"} Dec 03 05:44:32 crc kubenswrapper[4810]: I1203 05:44:32.659573 4810 generic.go:334] "Generic (PLEG): container finished" podID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerID="5900955b574d2cd6e8d3a85610f2cb2c7f4784750bea42cf5b4159982e626969" exitCode=0 Dec 03 05:44:32 crc kubenswrapper[4810]: I1203 05:44:32.659705 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wfl78" event={"ID":"ad9d333a-2c2a-4d28-8e28-821458b6e5f0","Type":"ContainerDied","Data":"5900955b574d2cd6e8d3a85610f2cb2c7f4784750bea42cf5b4159982e626969"} Dec 03 05:44:32 crc kubenswrapper[4810]: I1203 05:44:32.661877 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" event={"ID":"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746","Type":"ContainerStarted","Data":"669f77415e58a089d33347bc4229382b24020a4ec0bf73a6405a6eb2763c5a8d"} Dec 03 05:44:32 crc kubenswrapper[4810]: E1203 05:44:32.665836 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-nzdh5" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" Dec 03 05:44:32 crc kubenswrapper[4810]: E1203 05:44:32.666083 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mlr8j" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.671189 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" event={"ID":"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746","Type":"ContainerStarted","Data":"0389740b81014d0e6fd4e03a8789e83e4c1c3de0897385dd5a9b253880a8e965"} Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.671824 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zx6mp" event={"ID":"7283fe50-3c8e-4b8b-90ac-80e0e2c9a746","Type":"ContainerStarted","Data":"1ed0eb0cd6c05e3d26d87c9fc3cd05a9a6598f526205b04591367a8e6829e3d4"} Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.673633 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m697x" event={"ID":"4d0fa646-0a48-471d-9168-08716ab96d5e","Type":"ContainerStarted","Data":"551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e"} Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.676108 4810 generic.go:334] "Generic (PLEG): container finished" podID="e541b8ff-cee1-4236-84fc-0b4d8a7ad103" containerID="55f6a08a4bc87e5bac7b85e04be5db84684038666bdd452693c622a9232e509a" exitCode=0 Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.676188 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e541b8ff-cee1-4236-84fc-0b4d8a7ad103","Type":"ContainerDied","Data":"55f6a08a4bc87e5bac7b85e04be5db84684038666bdd452693c622a9232e509a"} Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.679296 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wfl78" event={"ID":"ad9d333a-2c2a-4d28-8e28-821458b6e5f0","Type":"ContainerStarted","Data":"ec0b71b8f568f24d2a6fb159acc1a52478d10a4167b77a31d62e3243db803450"} Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.693527 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-zx6mp" podStartSLOduration=178.693504825 podStartE2EDuration="2m58.693504825s" podCreationTimestamp="2025-12-03 05:41:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:44:33.691062009 +0000 UTC m=+197.626522880" watchObservedRunningTime="2025-12-03 05:44:33.693504825 +0000 UTC m=+197.628965666" Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.711847 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wfl78" podStartSLOduration=2.7442793869999997 podStartE2EDuration="43.711832959s" podCreationTimestamp="2025-12-03 05:43:50 +0000 UTC" firstStartedPulling="2025-12-03 05:43:52.149370456 +0000 UTC m=+156.084831297" lastFinishedPulling="2025-12-03 05:44:33.116924028 +0000 UTC m=+197.052384869" observedRunningTime="2025-12-03 05:44:33.711649074 +0000 UTC m=+197.647109915" watchObservedRunningTime="2025-12-03 05:44:33.711832959 +0000 UTC m=+197.647293800" Dec 03 05:44:33 crc kubenswrapper[4810]: I1203 05:44:33.750705 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m697x" podStartSLOduration=2.8032987560000002 podStartE2EDuration="43.750686817s" podCreationTimestamp="2025-12-03 05:43:50 +0000 UTC" firstStartedPulling="2025-12-03 05:43:52.135992985 +0000 UTC m=+156.071453826" lastFinishedPulling="2025-12-03 05:44:33.083381046 +0000 UTC m=+197.018841887" observedRunningTime="2025-12-03 05:44:33.749601817 +0000 UTC m=+197.685062658" watchObservedRunningTime="2025-12-03 05:44:33.750686817 +0000 UTC m=+197.686147658" Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.082073 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.157382 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kubelet-dir\") pod \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\" (UID: \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\") " Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.157493 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kube-api-access\") pod \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\" (UID: \"e541b8ff-cee1-4236-84fc-0b4d8a7ad103\") " Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.157583 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e541b8ff-cee1-4236-84fc-0b4d8a7ad103" (UID: "e541b8ff-cee1-4236-84fc-0b4d8a7ad103"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.157827 4810 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.164926 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e541b8ff-cee1-4236-84fc-0b4d8a7ad103" (UID: "e541b8ff-cee1-4236-84fc-0b4d8a7ad103"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.259101 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e541b8ff-cee1-4236-84fc-0b4d8a7ad103-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.691764 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e541b8ff-cee1-4236-84fc-0b4d8a7ad103","Type":"ContainerDied","Data":"5ff5f6864b12c2927f35224de210186bc727602d5ed11ab6ccc030b9471e6634"} Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.691801 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ff5f6864b12c2927f35224de210186bc727602d5ed11ab6ccc030b9471e6634" Dec 03 05:44:35 crc kubenswrapper[4810]: I1203 05:44:35.691811 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.364600 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 05:44:36 crc kubenswrapper[4810]: E1203 05:44:36.364882 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e541b8ff-cee1-4236-84fc-0b4d8a7ad103" containerName="pruner" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.364897 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e541b8ff-cee1-4236-84fc-0b4d8a7ad103" containerName="pruner" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.365015 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="e541b8ff-cee1-4236-84fc-0b4d8a7ad103" containerName="pruner" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.365442 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.367935 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.368212 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.371058 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.475041 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.475102 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-var-lock\") pod \"installer-9-crc\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.475149 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd24987-2a4f-406c-a597-95ddf6ad7908-kube-api-access\") pod \"installer-9-crc\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.576368 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-var-lock\") pod \"installer-9-crc\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.576436 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd24987-2a4f-406c-a597-95ddf6ad7908-kube-api-access\") pod \"installer-9-crc\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.576494 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.576485 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-var-lock\") pod \"installer-9-crc\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.576564 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.594446 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd24987-2a4f-406c-a597-95ddf6ad7908-kube-api-access\") pod \"installer-9-crc\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:36 crc kubenswrapper[4810]: I1203 05:44:36.697864 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:44:37 crc kubenswrapper[4810]: I1203 05:44:37.078731 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 05:44:37 crc kubenswrapper[4810]: W1203 05:44:37.092453 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod2cd24987_2a4f_406c_a597_95ddf6ad7908.slice/crio-7b0fd2ed52efa8c943a1a84a3825f6fa40845b3c8c64cada4e8161750270ce1f WatchSource:0}: Error finding container 7b0fd2ed52efa8c943a1a84a3825f6fa40845b3c8c64cada4e8161750270ce1f: Status 404 returned error can't find the container with id 7b0fd2ed52efa8c943a1a84a3825f6fa40845b3c8c64cada4e8161750270ce1f Dec 03 05:44:37 crc kubenswrapper[4810]: I1203 05:44:37.703723 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2cd24987-2a4f-406c-a597-95ddf6ad7908","Type":"ContainerStarted","Data":"b4fd46350162278bd02f2655a1d2489ece39dba9d34b3dc0dda4a0ad67b7476f"} Dec 03 05:44:37 crc kubenswrapper[4810]: I1203 05:44:37.703782 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2cd24987-2a4f-406c-a597-95ddf6ad7908","Type":"ContainerStarted","Data":"7b0fd2ed52efa8c943a1a84a3825f6fa40845b3c8c64cada4e8161750270ce1f"} Dec 03 05:44:37 crc kubenswrapper[4810]: I1203 05:44:37.718462 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.718438973 podStartE2EDuration="1.718438973s" podCreationTimestamp="2025-12-03 05:44:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:44:37.716660985 +0000 UTC m=+201.652121826" watchObservedRunningTime="2025-12-03 05:44:37.718438973 +0000 UTC m=+201.653899824" Dec 03 05:44:40 crc kubenswrapper[4810]: I1203 05:44:40.420975 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m697x" Dec 03 05:44:40 crc kubenswrapper[4810]: I1203 05:44:40.421507 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m697x" Dec 03 05:44:40 crc kubenswrapper[4810]: I1203 05:44:40.535075 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-m697x" Dec 03 05:44:40 crc kubenswrapper[4810]: I1203 05:44:40.767487 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m697x" Dec 03 05:44:40 crc kubenswrapper[4810]: I1203 05:44:40.819063 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:44:40 crc kubenswrapper[4810]: I1203 05:44:40.819105 4810 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:44:40 crc kubenswrapper[4810]: I1203 05:44:40.858447 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:44:41 crc kubenswrapper[4810]: I1203 05:44:41.773421 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:44:42 crc kubenswrapper[4810]: I1203 05:44:42.734890 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbbl7" event={"ID":"9edee8b2-6d3b-43ae-bee4-28739f3865bc","Type":"ContainerStarted","Data":"ea11098e09c5a870ccc0f0591dd7e3857cd8abe4f9788066d66b6aaee26f7cc2"} Dec 03 05:44:43 crc kubenswrapper[4810]: I1203 05:44:43.741986 4810 generic.go:334] "Generic (PLEG): container finished" podID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerID="ea11098e09c5a870ccc0f0591dd7e3857cd8abe4f9788066d66b6aaee26f7cc2" exitCode=0 Dec 03 05:44:43 crc kubenswrapper[4810]: I1203 05:44:43.742035 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbbl7" event={"ID":"9edee8b2-6d3b-43ae-bee4-28739f3865bc","Type":"ContainerDied","Data":"ea11098e09c5a870ccc0f0591dd7e3857cd8abe4f9788066d66b6aaee26f7cc2"} Dec 03 05:44:44 crc kubenswrapper[4810]: I1203 05:44:44.163543 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wfl78"] Dec 03 05:44:44 crc kubenswrapper[4810]: I1203 05:44:44.163828 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wfl78" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerName="registry-server" containerID="cri-o://ec0b71b8f568f24d2a6fb159acc1a52478d10a4167b77a31d62e3243db803450" gracePeriod=2 Dec 03 05:44:44 crc kubenswrapper[4810]: I1203 05:44:44.750112 4810 generic.go:334] "Generic (PLEG): container finished" podID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerID="ec0b71b8f568f24d2a6fb159acc1a52478d10a4167b77a31d62e3243db803450" exitCode=0 Dec 03 05:44:44 crc kubenswrapper[4810]: I1203 05:44:44.750187 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wfl78" event={"ID":"ad9d333a-2c2a-4d28-8e28-821458b6e5f0","Type":"ContainerDied","Data":"ec0b71b8f568f24d2a6fb159acc1a52478d10a4167b77a31d62e3243db803450"} Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.274289 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.308700 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xd897\" (UniqueName: \"kubernetes.io/projected/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-kube-api-access-xd897\") pod \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.308803 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-utilities\") pod \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.308891 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-catalog-content\") pod \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\" (UID: \"ad9d333a-2c2a-4d28-8e28-821458b6e5f0\") " Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.309635 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-utilities" (OuterVolumeSpecName: "utilities") pod "ad9d333a-2c2a-4d28-8e28-821458b6e5f0" (UID: "ad9d333a-2c2a-4d28-8e28-821458b6e5f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.317400 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-kube-api-access-xd897" (OuterVolumeSpecName: "kube-api-access-xd897") pod "ad9d333a-2c2a-4d28-8e28-821458b6e5f0" (UID: "ad9d333a-2c2a-4d28-8e28-821458b6e5f0"). InnerVolumeSpecName "kube-api-access-xd897". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.390134 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad9d333a-2c2a-4d28-8e28-821458b6e5f0" (UID: "ad9d333a-2c2a-4d28-8e28-821458b6e5f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.410521 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd897\" (UniqueName: \"kubernetes.io/projected/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-kube-api-access-xd897\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.410558 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.410568 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad9d333a-2c2a-4d28-8e28-821458b6e5f0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.758693 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbbl7" event={"ID":"9edee8b2-6d3b-43ae-bee4-28739f3865bc","Type":"ContainerStarted","Data":"6d0498ae4e05e0b29d278205ebbec637427299144fd204442428bf854e24376f"} Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.765072 4810 generic.go:334] "Generic (PLEG): container finished" podID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerID="efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af" exitCode=0 Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.765103 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gj4t7" event={"ID":"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2","Type":"ContainerDied","Data":"efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af"} Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.768072 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wfl78" event={"ID":"ad9d333a-2c2a-4d28-8e28-821458b6e5f0","Type":"ContainerDied","Data":"382308d226adeb9f00c7241ba25899ed1a25e93db851899184ed79cd03d60f37"} Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.768092 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wfl78" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.768128 4810 scope.go:117] "RemoveContainer" containerID="ec0b71b8f568f24d2a6fb159acc1a52478d10a4167b77a31d62e3243db803450" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.776014 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zkl7b" event={"ID":"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427","Type":"ContainerStarted","Data":"d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59"} Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.779638 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kbbl7" podStartSLOduration=1.991756745 podStartE2EDuration="52.779619011s" podCreationTimestamp="2025-12-03 05:43:53 +0000 UTC" firstStartedPulling="2025-12-03 05:43:54.209913434 +0000 UTC m=+158.145374275" lastFinishedPulling="2025-12-03 05:44:44.9977757 +0000 UTC m=+208.933236541" observedRunningTime="2025-12-03 05:44:45.779409796 +0000 UTC m=+209.714870647" watchObservedRunningTime="2025-12-03 05:44:45.779619011 +0000 UTC m=+209.715079852" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.790604 4810 scope.go:117] "RemoveContainer" containerID="5900955b574d2cd6e8d3a85610f2cb2c7f4784750bea42cf5b4159982e626969" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.828196 4810 scope.go:117] "RemoveContainer" containerID="d57d115e9651ea29066bc78a229a5b009ba879113414a71f7825444ffb835470" Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.829671 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wfl78"] Dec 03 05:44:45 crc kubenswrapper[4810]: I1203 05:44:45.833388 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wfl78"] Dec 03 05:44:46 crc kubenswrapper[4810]: I1203 05:44:46.384432 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" path="/var/lib/kubelet/pods/ad9d333a-2c2a-4d28-8e28-821458b6e5f0/volumes" Dec 03 05:44:46 crc kubenswrapper[4810]: I1203 05:44:46.784156 4810 generic.go:334] "Generic (PLEG): container finished" podID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerID="d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59" exitCode=0 Dec 03 05:44:46 crc kubenswrapper[4810]: I1203 05:44:46.784259 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zkl7b" event={"ID":"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427","Type":"ContainerDied","Data":"d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59"} Dec 03 05:44:46 crc kubenswrapper[4810]: I1203 05:44:46.788967 4810 generic.go:334] "Generic (PLEG): container finished" podID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerID="fb0e9b9bad53390e35132c573093b8b9517088d318c69f2ce06d5180a4675ef5" exitCode=0 Dec 03 05:44:46 crc kubenswrapper[4810]: I1203 05:44:46.789025 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxjsv" event={"ID":"f0b0d549-514c-4f29-901d-de91ae9e5242","Type":"ContainerDied","Data":"fb0e9b9bad53390e35132c573093b8b9517088d318c69f2ce06d5180a4675ef5"} Dec 03 05:44:46 crc kubenswrapper[4810]: I1203 05:44:46.792031 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gj4t7" 
event={"ID":"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2","Type":"ContainerStarted","Data":"84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75"} Dec 03 05:44:46 crc kubenswrapper[4810]: I1203 05:44:46.845213 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gj4t7" podStartSLOduration=2.877402038 podStartE2EDuration="54.845191593s" podCreationTimestamp="2025-12-03 05:43:52 +0000 UTC" firstStartedPulling="2025-12-03 05:43:54.222787848 +0000 UTC m=+158.158248689" lastFinishedPulling="2025-12-03 05:44:46.190577403 +0000 UTC m=+210.126038244" observedRunningTime="2025-12-03 05:44:46.843684242 +0000 UTC m=+210.779145083" watchObservedRunningTime="2025-12-03 05:44:46.845191593 +0000 UTC m=+210.780652434" Dec 03 05:44:47 crc kubenswrapper[4810]: I1203 05:44:47.802150 4810 generic.go:334] "Generic (PLEG): container finished" podID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerID="305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1" exitCode=0 Dec 03 05:44:47 crc kubenswrapper[4810]: I1203 05:44:47.802248 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mlr8j" event={"ID":"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d","Type":"ContainerDied","Data":"305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1"} Dec 03 05:44:47 crc kubenswrapper[4810]: I1203 05:44:47.805821 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zkl7b" event={"ID":"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427","Type":"ContainerStarted","Data":"ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5"} Dec 03 05:44:47 crc kubenswrapper[4810]: I1203 05:44:47.808626 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxjsv" event={"ID":"f0b0d549-514c-4f29-901d-de91ae9e5242","Type":"ContainerStarted","Data":"56e265aef7ef22743cf7486cc613e75777f03f37b27598954221780ddf2ebc71"} Dec 03 05:44:47 crc kubenswrapper[4810]: I1203 05:44:47.842241 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jxjsv" podStartSLOduration=2.854364209 podStartE2EDuration="56.842220726s" podCreationTimestamp="2025-12-03 05:43:51 +0000 UTC" firstStartedPulling="2025-12-03 05:43:53.173246754 +0000 UTC m=+157.108707595" lastFinishedPulling="2025-12-03 05:44:47.161103271 +0000 UTC m=+211.096564112" observedRunningTime="2025-12-03 05:44:47.841702462 +0000 UTC m=+211.777163313" watchObservedRunningTime="2025-12-03 05:44:47.842220726 +0000 UTC m=+211.777681557" Dec 03 05:44:47 crc kubenswrapper[4810]: I1203 05:44:47.861584 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zkl7b" podStartSLOduration=2.958277401 podStartE2EDuration="54.861557608s" podCreationTimestamp="2025-12-03 05:43:53 +0000 UTC" firstStartedPulling="2025-12-03 05:43:55.297951136 +0000 UTC m=+159.233411967" lastFinishedPulling="2025-12-03 05:44:47.201231333 +0000 UTC m=+211.136692174" observedRunningTime="2025-12-03 05:44:47.85976919 +0000 UTC m=+211.795230111" watchObservedRunningTime="2025-12-03 05:44:47.861557608 +0000 UTC m=+211.797018449" Dec 03 05:44:48 crc kubenswrapper[4810]: I1203 05:44:48.818206 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mlr8j" 
event={"ID":"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d","Type":"ContainerStarted","Data":"bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a"} Dec 03 05:44:48 crc kubenswrapper[4810]: I1203 05:44:48.839079 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mlr8j" podStartSLOduration=3.8107879540000003 podStartE2EDuration="59.839060285s" podCreationTimestamp="2025-12-03 05:43:49 +0000 UTC" firstStartedPulling="2025-12-03 05:43:52.154517588 +0000 UTC m=+156.089978429" lastFinishedPulling="2025-12-03 05:44:48.182789909 +0000 UTC m=+212.118250760" observedRunningTime="2025-12-03 05:44:48.837608996 +0000 UTC m=+212.773069867" watchObservedRunningTime="2025-12-03 05:44:48.839060285 +0000 UTC m=+212.774521126" Dec 03 05:44:49 crc kubenswrapper[4810]: I1203 05:44:49.824696 4810 generic.go:334] "Generic (PLEG): container finished" podID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerID="47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4" exitCode=0 Dec 03 05:44:49 crc kubenswrapper[4810]: I1203 05:44:49.824766 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nzdh5" event={"ID":"a3b0cacb-bba2-4966-9b2d-28a0410908ab","Type":"ContainerDied","Data":"47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4"} Dec 03 05:44:50 crc kubenswrapper[4810]: I1203 05:44:50.165886 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:44:50 crc kubenswrapper[4810]: I1203 05:44:50.165987 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:44:50 crc kubenswrapper[4810]: I1203 05:44:50.211088 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:44:52 crc kubenswrapper[4810]: I1203 05:44:52.167372 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:44:52 crc kubenswrapper[4810]: I1203 05:44:52.167444 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:44:52 crc kubenswrapper[4810]: I1203 05:44:52.205824 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:44:52 crc kubenswrapper[4810]: I1203 05:44:52.586002 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:44:52 crc kubenswrapper[4810]: I1203 05:44:52.586049 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:44:52 crc kubenswrapper[4810]: I1203 05:44:52.628621 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:44:52 crc kubenswrapper[4810]: I1203 05:44:52.876039 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:44:52 crc kubenswrapper[4810]: I1203 05:44:52.901946 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:44:53 crc kubenswrapper[4810]: I1203 05:44:53.373179 4810 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:44:53 crc kubenswrapper[4810]: I1203 05:44:53.373224 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:44:53 crc kubenswrapper[4810]: I1203 05:44:53.412929 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:44:53 crc kubenswrapper[4810]: I1203 05:44:53.562890 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gj4t7"] Dec 03 05:44:53 crc kubenswrapper[4810]: I1203 05:44:53.813834 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:44:53 crc kubenswrapper[4810]: I1203 05:44:53.814158 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:44:53 crc kubenswrapper[4810]: I1203 05:44:53.867420 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:44:53 crc kubenswrapper[4810]: I1203 05:44:53.887603 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:44:54 crc kubenswrapper[4810]: I1203 05:44:54.850776 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gj4t7" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerName="registry-server" containerID="cri-o://84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75" gracePeriod=2 Dec 03 05:44:54 crc kubenswrapper[4810]: I1203 05:44:54.893875 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:44:55 crc kubenswrapper[4810]: I1203 05:44:55.677562 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:44:55 crc kubenswrapper[4810]: I1203 05:44:55.677630 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:44:55 crc kubenswrapper[4810]: I1203 05:44:55.677687 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:44:55 crc kubenswrapper[4810]: I1203 05:44:55.678278 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 05:44:55 crc kubenswrapper[4810]: I1203 05:44:55.678380 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" 
podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97" gracePeriod=600 Dec 03 05:44:55 crc kubenswrapper[4810]: I1203 05:44:55.858810 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nzdh5" event={"ID":"a3b0cacb-bba2-4966-9b2d-28a0410908ab","Type":"ContainerStarted","Data":"a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1"} Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.526123 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.576669 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zkl7b"] Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.660194 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-catalog-content\") pod \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.660288 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-utilities\") pod \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.660313 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcklw\" (UniqueName: \"kubernetes.io/projected/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-kube-api-access-qcklw\") pod \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\" (UID: \"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2\") " Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.661268 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-utilities" (OuterVolumeSpecName: "utilities") pod "4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" (UID: "4f4a5cea-9a0e-4387-b894-7ac5d56dcde2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.666372 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-kube-api-access-qcklw" (OuterVolumeSpecName: "kube-api-access-qcklw") pod "4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" (UID: "4f4a5cea-9a0e-4387-b894-7ac5d56dcde2"). InnerVolumeSpecName "kube-api-access-qcklw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.693200 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" (UID: "4f4a5cea-9a0e-4387-b894-7ac5d56dcde2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.762151 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.762569 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.762582 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcklw\" (UniqueName: \"kubernetes.io/projected/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2-kube-api-access-qcklw\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.866952 4810 generic.go:334] "Generic (PLEG): container finished" podID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerID="84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75" exitCode=0 Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.867020 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gj4t7" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.867044 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gj4t7" event={"ID":"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2","Type":"ContainerDied","Data":"84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75"} Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.867078 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gj4t7" event={"ID":"4f4a5cea-9a0e-4387-b894-7ac5d56dcde2","Type":"ContainerDied","Data":"557a219016ae0ac8c53f06063c0636c89db848975a29a49d4b13653851573231"} Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.867099 4810 scope.go:117] "RemoveContainer" containerID="84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.870360 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97" exitCode=0 Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.870622 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zkl7b" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerName="registry-server" containerID="cri-o://ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5" gracePeriod=2 Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.870896 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97"} Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.870937 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"bbc0200392c33056613f2d06cbccb7949a4ac2b9f3cdb5326da9ff914ce1c363"} Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.895234 4810 scope.go:117] "RemoveContainer" 
containerID="efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.917985 4810 scope.go:117] "RemoveContainer" containerID="f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.920695 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nzdh5" podStartSLOduration=4.337141172 podStartE2EDuration="1m6.920677115s" podCreationTimestamp="2025-12-03 05:43:50 +0000 UTC" firstStartedPulling="2025-12-03 05:43:52.133145524 +0000 UTC m=+156.068606365" lastFinishedPulling="2025-12-03 05:44:54.716681467 +0000 UTC m=+218.652142308" observedRunningTime="2025-12-03 05:44:56.918567328 +0000 UTC m=+220.854028179" watchObservedRunningTime="2025-12-03 05:44:56.920677115 +0000 UTC m=+220.856137956" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.938057 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gj4t7"] Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.940286 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gj4t7"] Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.943984 4810 scope.go:117] "RemoveContainer" containerID="84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75" Dec 03 05:44:56 crc kubenswrapper[4810]: E1203 05:44:56.944385 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75\": container with ID starting with 84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75 not found: ID does not exist" containerID="84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.944454 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75"} err="failed to get container status \"84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75\": rpc error: code = NotFound desc = could not find container \"84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75\": container with ID starting with 84153621d57ab0b9ff2af1117640a8964712bbd81c7c80e798288e32d2f82c75 not found: ID does not exist" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.944474 4810 scope.go:117] "RemoveContainer" containerID="efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af" Dec 03 05:44:56 crc kubenswrapper[4810]: E1203 05:44:56.944691 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af\": container with ID starting with efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af not found: ID does not exist" containerID="efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.944708 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af"} err="failed to get container status \"efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af\": rpc error: code = NotFound desc = could not find container 
\"efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af\": container with ID starting with efa70361fd169e3916dc00e42519909f202f27daa379f12589ec6049285466af not found: ID does not exist" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.944722 4810 scope.go:117] "RemoveContainer" containerID="f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918" Dec 03 05:44:56 crc kubenswrapper[4810]: E1203 05:44:56.944951 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918\": container with ID starting with f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918 not found: ID does not exist" containerID="f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918" Dec 03 05:44:56 crc kubenswrapper[4810]: I1203 05:44:56.944971 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918"} err="failed to get container status \"f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918\": rpc error: code = NotFound desc = could not find container \"f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918\": container with ID starting with f6c59649780a91b8fd59f5fcf6514553639f0f9cadbe8c96b45597366c9a5918 not found: ID does not exist" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.229473 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.372668 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fgzd\" (UniqueName: \"kubernetes.io/projected/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-kube-api-access-5fgzd\") pod \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.372925 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-catalog-content\") pod \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.372984 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-utilities\") pod \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\" (UID: \"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427\") " Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.373707 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-utilities" (OuterVolumeSpecName: "utilities") pod "dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" (UID: "dd2bc6c4-3ca3-49c8-b988-a1dd315d1427"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.379336 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-kube-api-access-5fgzd" (OuterVolumeSpecName: "kube-api-access-5fgzd") pod "dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" (UID: "dd2bc6c4-3ca3-49c8-b988-a1dd315d1427"). InnerVolumeSpecName "kube-api-access-5fgzd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.474831 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.474864 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fgzd\" (UniqueName: \"kubernetes.io/projected/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-kube-api-access-5fgzd\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.487468 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" (UID: "dd2bc6c4-3ca3-49c8-b988-a1dd315d1427"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.575579 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.883058 4810 generic.go:334] "Generic (PLEG): container finished" podID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerID="ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5" exitCode=0 Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.883142 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zkl7b" event={"ID":"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427","Type":"ContainerDied","Data":"ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5"} Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.883165 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zkl7b" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.883211 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zkl7b" event={"ID":"dd2bc6c4-3ca3-49c8-b988-a1dd315d1427","Type":"ContainerDied","Data":"6dadda76a05c87541467d120830059dbd89e06e8d17bef8d24882de62bf8eadf"} Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.883242 4810 scope.go:117] "RemoveContainer" containerID="ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.906436 4810 scope.go:117] "RemoveContainer" containerID="d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.927195 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zkl7b"] Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.927499 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zkl7b"] Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.946474 4810 scope.go:117] "RemoveContainer" containerID="07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.960843 4810 scope.go:117] "RemoveContainer" containerID="ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5" Dec 03 05:44:57 crc kubenswrapper[4810]: E1203 05:44:57.961358 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5\": container with ID starting with ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5 not found: ID does not exist" containerID="ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.961416 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5"} err="failed to get container status \"ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5\": rpc error: code = NotFound desc = could not find container \"ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5\": container with ID starting with ab44d8bfe75fce1e074e8fde59527957f7868fca166d245331501c2260c0ccf5 not found: ID does not exist" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.961449 4810 scope.go:117] "RemoveContainer" containerID="d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59" Dec 03 05:44:57 crc kubenswrapper[4810]: E1203 05:44:57.961937 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59\": container with ID starting with d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59 not found: ID does not exist" containerID="d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.961963 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59"} err="failed to get container status \"d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59\": rpc error: code = NotFound desc = could not find container 
\"d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59\": container with ID starting with d47dc04b09d58aed6d43cfde8430ec6be7fd52502419fe17e0de89f4c7919e59 not found: ID does not exist" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.961979 4810 scope.go:117] "RemoveContainer" containerID="07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2" Dec 03 05:44:57 crc kubenswrapper[4810]: E1203 05:44:57.962226 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2\": container with ID starting with 07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2 not found: ID does not exist" containerID="07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2" Dec 03 05:44:57 crc kubenswrapper[4810]: I1203 05:44:57.962247 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2"} err="failed to get container status \"07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2\": rpc error: code = NotFound desc = could not find container \"07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2\": container with ID starting with 07a20383839e8dd12635c9f3e3210f09d95954df46a6d599ceb639ffeba609f2 not found: ID does not exist" Dec 03 05:44:58 crc kubenswrapper[4810]: I1203 05:44:58.390014 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" path="/var/lib/kubelet/pods/4f4a5cea-9a0e-4387-b894-7ac5d56dcde2/volumes" Dec 03 05:44:58 crc kubenswrapper[4810]: I1203 05:44:58.390954 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" path="/var/lib/kubelet/pods/dd2bc6c4-3ca3-49c8-b988-a1dd315d1427/volumes" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.146531 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f"] Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147447 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147471 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147494 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerName="extract-utilities" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147508 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerName="extract-utilities" Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147525 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerName="extract-content" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147536 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerName="extract-content" Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147552 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerName="extract-content" Dec 03 05:45:00 crc 
kubenswrapper[4810]: I1203 05:45:00.147563 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerName="extract-content" Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147583 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerName="extract-content" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147594 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerName="extract-content" Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147610 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147620 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147679 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerName="extract-utilities" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147691 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerName="extract-utilities" Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147708 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerName="extract-utilities" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147720 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerName="extract-utilities" Dec 03 05:45:00 crc kubenswrapper[4810]: E1203 05:45:00.147757 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147769 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147931 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd2bc6c4-3ca3-49c8-b988-a1dd315d1427" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147953 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f4a5cea-9a0e-4387-b894-7ac5d56dcde2" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.147975 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad9d333a-2c2a-4d28-8e28-821458b6e5f0" containerName="registry-server" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.148626 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.152087 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.152547 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.161282 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f"] Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.219664 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.316364 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlbc7\" (UniqueName: \"kubernetes.io/projected/4a1d62d9-43bd-40c8-8f37-466909c07065-kube-api-access-dlbc7\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.316421 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a1d62d9-43bd-40c8-8f37-466909c07065-secret-volume\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.316703 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a1d62d9-43bd-40c8-8f37-466909c07065-config-volume\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.418335 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a1d62d9-43bd-40c8-8f37-466909c07065-config-volume\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.418441 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlbc7\" (UniqueName: \"kubernetes.io/projected/4a1d62d9-43bd-40c8-8f37-466909c07065-kube-api-access-dlbc7\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.418466 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a1d62d9-43bd-40c8-8f37-466909c07065-secret-volume\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.419209 
4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a1d62d9-43bd-40c8-8f37-466909c07065-config-volume\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.429610 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a1d62d9-43bd-40c8-8f37-466909c07065-secret-volume\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.439550 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlbc7\" (UniqueName: \"kubernetes.io/projected/4a1d62d9-43bd-40c8-8f37-466909c07065-kube-api-access-dlbc7\") pod \"collect-profiles-29412345-6sv8f\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.483386 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.602196 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.603159 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.671887 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.698573 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f"] Dec 03 05:45:00 crc kubenswrapper[4810]: W1203 05:45:00.706955 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a1d62d9_43bd_40c8_8f37_466909c07065.slice/crio-02489c5dfb618f82fae9c7f03f61d6495a7d5804d6301c13f546cd37971a6f7e WatchSource:0}: Error finding container 02489c5dfb618f82fae9c7f03f61d6495a7d5804d6301c13f546cd37971a6f7e: Status 404 returned error can't find the container with id 02489c5dfb618f82fae9c7f03f61d6495a7d5804d6301c13f546cd37971a6f7e Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.903477 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" event={"ID":"4a1d62d9-43bd-40c8-8f37-466909c07065","Type":"ContainerStarted","Data":"794c65ba7f08ef9ab7d972f445647e1fc09bde9b6cd89e490e05545a625efc03"} Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.903524 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" event={"ID":"4a1d62d9-43bd-40c8-8f37-466909c07065","Type":"ContainerStarted","Data":"02489c5dfb618f82fae9c7f03f61d6495a7d5804d6301c13f546cd37971a6f7e"} Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.920448 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" podStartSLOduration=0.920433113 podStartE2EDuration="920.433113ms" podCreationTimestamp="2025-12-03 05:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:45:00.917684539 +0000 UTC m=+224.853145380" watchObservedRunningTime="2025-12-03 05:45:00.920433113 +0000 UTC m=+224.855893944" Dec 03 05:45:00 crc kubenswrapper[4810]: I1203 05:45:00.949107 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:45:01 crc kubenswrapper[4810]: I1203 05:45:01.910157 4810 generic.go:334] "Generic (PLEG): container finished" podID="4a1d62d9-43bd-40c8-8f37-466909c07065" containerID="794c65ba7f08ef9ab7d972f445647e1fc09bde9b6cd89e490e05545a625efc03" exitCode=0 Dec 03 05:45:01 crc kubenswrapper[4810]: I1203 05:45:01.910244 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" event={"ID":"4a1d62d9-43bd-40c8-8f37-466909c07065","Type":"ContainerDied","Data":"794c65ba7f08ef9ab7d972f445647e1fc09bde9b6cd89e490e05545a625efc03"} Dec 03 05:45:02 crc kubenswrapper[4810]: I1203 05:45:02.364859 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nzdh5"] Dec 03 05:45:02 crc kubenswrapper[4810]: I1203 05:45:02.739171 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nbzmx"] Dec 03 05:45:02 crc kubenswrapper[4810]: I1203 05:45:02.915131 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nzdh5" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerName="registry-server" containerID="cri-o://a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1" gracePeriod=2 Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.176517 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.257536 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a1d62d9-43bd-40c8-8f37-466909c07065-secret-volume\") pod \"4a1d62d9-43bd-40c8-8f37-466909c07065\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.257657 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a1d62d9-43bd-40c8-8f37-466909c07065-config-volume\") pod \"4a1d62d9-43bd-40c8-8f37-466909c07065\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.257763 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlbc7\" (UniqueName: \"kubernetes.io/projected/4a1d62d9-43bd-40c8-8f37-466909c07065-kube-api-access-dlbc7\") pod \"4a1d62d9-43bd-40c8-8f37-466909c07065\" (UID: \"4a1d62d9-43bd-40c8-8f37-466909c07065\") " Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.258717 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a1d62d9-43bd-40c8-8f37-466909c07065-config-volume" (OuterVolumeSpecName: "config-volume") pod "4a1d62d9-43bd-40c8-8f37-466909c07065" (UID: "4a1d62d9-43bd-40c8-8f37-466909c07065"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.266120 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a1d62d9-43bd-40c8-8f37-466909c07065-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4a1d62d9-43bd-40c8-8f37-466909c07065" (UID: "4a1d62d9-43bd-40c8-8f37-466909c07065"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.266246 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a1d62d9-43bd-40c8-8f37-466909c07065-kube-api-access-dlbc7" (OuterVolumeSpecName: "kube-api-access-dlbc7") pod "4a1d62d9-43bd-40c8-8f37-466909c07065" (UID: "4a1d62d9-43bd-40c8-8f37-466909c07065"). InnerVolumeSpecName "kube-api-access-dlbc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.267505 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.359056 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-catalog-content\") pod \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.359168 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4s57\" (UniqueName: \"kubernetes.io/projected/a3b0cacb-bba2-4966-9b2d-28a0410908ab-kube-api-access-j4s57\") pod \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.359248 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-utilities\") pod \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\" (UID: \"a3b0cacb-bba2-4966-9b2d-28a0410908ab\") " Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.359593 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a1d62d9-43bd-40c8-8f37-466909c07065-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.359622 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a1d62d9-43bd-40c8-8f37-466909c07065-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.359634 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlbc7\" (UniqueName: \"kubernetes.io/projected/4a1d62d9-43bd-40c8-8f37-466909c07065-kube-api-access-dlbc7\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.360796 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-utilities" (OuterVolumeSpecName: "utilities") pod "a3b0cacb-bba2-4966-9b2d-28a0410908ab" (UID: "a3b0cacb-bba2-4966-9b2d-28a0410908ab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.364970 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3b0cacb-bba2-4966-9b2d-28a0410908ab-kube-api-access-j4s57" (OuterVolumeSpecName: "kube-api-access-j4s57") pod "a3b0cacb-bba2-4966-9b2d-28a0410908ab" (UID: "a3b0cacb-bba2-4966-9b2d-28a0410908ab"). InnerVolumeSpecName "kube-api-access-j4s57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.418292 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3b0cacb-bba2-4966-9b2d-28a0410908ab" (UID: "a3b0cacb-bba2-4966-9b2d-28a0410908ab"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.461710 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.461787 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4s57\" (UniqueName: \"kubernetes.io/projected/a3b0cacb-bba2-4966-9b2d-28a0410908ab-kube-api-access-j4s57\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.461803 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3b0cacb-bba2-4966-9b2d-28a0410908ab-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.923075 4810 generic.go:334] "Generic (PLEG): container finished" podID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerID="a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1" exitCode=0 Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.923130 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nzdh5" event={"ID":"a3b0cacb-bba2-4966-9b2d-28a0410908ab","Type":"ContainerDied","Data":"a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1"} Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.923170 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nzdh5" event={"ID":"a3b0cacb-bba2-4966-9b2d-28a0410908ab","Type":"ContainerDied","Data":"713798688aca7cad2a2021fefc9d74b5d7ef36876b0a57234dfbb956e4246c72"} Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.923175 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nzdh5" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.923196 4810 scope.go:117] "RemoveContainer" containerID="a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.924674 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" event={"ID":"4a1d62d9-43bd-40c8-8f37-466909c07065","Type":"ContainerDied","Data":"02489c5dfb618f82fae9c7f03f61d6495a7d5804d6301c13f546cd37971a6f7e"} Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.924697 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.924714 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02489c5dfb618f82fae9c7f03f61d6495a7d5804d6301c13f546cd37971a6f7e" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.941587 4810 scope.go:117] "RemoveContainer" containerID="47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.954031 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nzdh5"] Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.959875 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nzdh5"] Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.966071 4810 scope.go:117] "RemoveContainer" containerID="a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.977723 4810 scope.go:117] "RemoveContainer" containerID="a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1" Dec 03 05:45:03 crc kubenswrapper[4810]: E1203 05:45:03.978158 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1\": container with ID starting with a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1 not found: ID does not exist" containerID="a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.978189 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1"} err="failed to get container status \"a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1\": rpc error: code = NotFound desc = could not find container \"a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1\": container with ID starting with a4724b12ae927a08e6144d437c6ff634e2a771a7fc030ef339114357ca3cddc1 not found: ID does not exist" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.978212 4810 scope.go:117] "RemoveContainer" containerID="47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4" Dec 03 05:45:03 crc kubenswrapper[4810]: E1203 05:45:03.978528 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4\": container with ID starting with 47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4 not found: ID does not exist" containerID="47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.978583 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4"} err="failed to get container status \"47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4\": rpc error: code = NotFound desc = could not find container \"47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4\": container with ID starting with 47c9bda1046d502627da1c46e018510c68f9eef596a857f19e5517f6ae5ce2f4 not found: ID does not exist" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 
05:45:03.978619 4810 scope.go:117] "RemoveContainer" containerID="a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a" Dec 03 05:45:03 crc kubenswrapper[4810]: E1203 05:45:03.979029 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a\": container with ID starting with a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a not found: ID does not exist" containerID="a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a" Dec 03 05:45:03 crc kubenswrapper[4810]: I1203 05:45:03.979057 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a"} err="failed to get container status \"a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a\": rpc error: code = NotFound desc = could not find container \"a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a\": container with ID starting with a98022a9d7068245abc1df569a5e996a319e23cddb42d657e023fd4ec8c4df7a not found: ID does not exist" Dec 03 05:45:04 crc kubenswrapper[4810]: I1203 05:45:04.383474 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" path="/var/lib/kubelet/pods/a3b0cacb-bba2-4966-9b2d-28a0410908ab/volumes" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.016316 4810 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.017311 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerName="registry-server" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.017324 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerName="registry-server" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.017337 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerName="extract-utilities" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.017344 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerName="extract-utilities" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.017358 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerName="extract-content" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.017363 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerName="extract-content" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.017372 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a1d62d9-43bd-40c8-8f37-466909c07065" containerName="collect-profiles" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.017377 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1d62d9-43bd-40c8-8f37-466909c07065" containerName="collect-profiles" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.017476 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3b0cacb-bba2-4966-9b2d-28a0410908ab" containerName="registry-server" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.017492 4810 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="4a1d62d9-43bd-40c8-8f37-466909c07065" containerName="collect-profiles" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.017845 4810 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.018089 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0" gracePeriod=15 Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.018144 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.018146 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9" gracePeriod=15 Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.018185 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b" gracePeriod=15 Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.018217 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49" gracePeriod=15 Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.018302 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d" gracePeriod=15 Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019194 4810 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.019451 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019472 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.019489 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019501 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.019522 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 
05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019534 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.019552 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019565 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.019594 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019606 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.019629 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019640 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019850 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019869 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019885 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019903 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019927 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.019942 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.020150 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.020165 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.072091 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.117685 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.117777 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.117815 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.117878 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.117959 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.118010 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.118029 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.118047 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219633 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219703 4810 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219762 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219806 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219827 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219846 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219796 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219907 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219874 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219920 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219951 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219937 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219969 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.220003 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.220051 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.219839 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.356170 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:45:15 crc kubenswrapper[4810]: W1203 05:45:15.378884 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-475406e54801e6380d1d2f5a2d9793b113bb0d77c3663c063637c805afe2eb8b WatchSource:0}: Error finding container 475406e54801e6380d1d2f5a2d9793b113bb0d77c3663c063637c805afe2eb8b: Status 404 returned error can't find the container with id 475406e54801e6380d1d2f5a2d9793b113bb0d77c3663c063637c805afe2eb8b Dec 03 05:45:15 crc kubenswrapper[4810]: E1203 05:45:15.381945 4810 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.23:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d9e54a4f4cb0d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 05:45:15.381050125 +0000 UTC m=+239.316510976,LastTimestamp:2025-12-03 05:45:15.381050125 +0000 UTC m=+239.316510976,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.999064 4810 generic.go:334] "Generic (PLEG): container finished" podID="2cd24987-2a4f-406c-a597-95ddf6ad7908" containerID="b4fd46350162278bd02f2655a1d2489ece39dba9d34b3dc0dda4a0ad67b7476f" exitCode=0 Dec 03 05:45:15 crc kubenswrapper[4810]: I1203 05:45:15.999188 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2cd24987-2a4f-406c-a597-95ddf6ad7908","Type":"ContainerDied","Data":"b4fd46350162278bd02f2655a1d2489ece39dba9d34b3dc0dda4a0ad67b7476f"} Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.001590 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.002376 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.002907 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045"} Dec 03 05:45:16 
crc kubenswrapper[4810]: I1203 05:45:16.003112 4810 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.003145 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"475406e54801e6380d1d2f5a2d9793b113bb0d77c3663c063637c805afe2eb8b"} Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.004029 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.004886 4810 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.005502 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.007105 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.009405 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.011078 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9" exitCode=0 Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.011139 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d" exitCode=0 Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.011166 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b" exitCode=0 Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.011191 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49" exitCode=2 Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.011218 4810 scope.go:117] "RemoveContainer" containerID="1783f04e6a61d25b4a795dcaf4810d4299324d0ae67c21a43bbf990075928226" 
Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.379921 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.380324 4810 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:16 crc kubenswrapper[4810]: I1203 05:45:16.380611 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.026067 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.391189 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.392602 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.392894 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.395573 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.396241 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.396693 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.397347 4810 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.397762 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.451445 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd24987-2a4f-406c-a597-95ddf6ad7908-kube-api-access\") pod \"2cd24987-2a4f-406c-a597-95ddf6ad7908\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.451783 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-var-lock\") pod \"2cd24987-2a4f-406c-a597-95ddf6ad7908\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.451888 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-var-lock" (OuterVolumeSpecName: "var-lock") pod "2cd24987-2a4f-406c-a597-95ddf6ad7908" (UID: "2cd24987-2a4f-406c-a597-95ddf6ad7908"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.451915 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-kubelet-dir\") pod \"2cd24987-2a4f-406c-a597-95ddf6ad7908\" (UID: \"2cd24987-2a4f-406c-a597-95ddf6ad7908\") " Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.452092 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2cd24987-2a4f-406c-a597-95ddf6ad7908" (UID: "2cd24987-2a4f-406c-a597-95ddf6ad7908"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.452832 4810 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.452870 4810 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd24987-2a4f-406c-a597-95ddf6ad7908-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.459016 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cd24987-2a4f-406c-a597-95ddf6ad7908-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2cd24987-2a4f-406c-a597-95ddf6ad7908" (UID: "2cd24987-2a4f-406c-a597-95ddf6ad7908"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554336 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554400 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554494 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554591 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554600 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554624 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554814 4810 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554825 4810 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554833 4810 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:17 crc kubenswrapper[4810]: I1203 05:45:17.554842 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd24987-2a4f-406c-a597-95ddf6ad7908-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.045502 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.045864 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2cd24987-2a4f-406c-a597-95ddf6ad7908","Type":"ContainerDied","Data":"7b0fd2ed52efa8c943a1a84a3825f6fa40845b3c8c64cada4e8161750270ce1f"} Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.045918 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b0fd2ed52efa8c943a1a84a3825f6fa40845b3c8c64cada4e8161750270ce1f" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.051508 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.052418 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0" exitCode=0 Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.052511 4810 scope.go:117] "RemoveContainer" containerID="5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.052558 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.067106 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.067600 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.068063 4810 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.074658 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.076380 4810 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.077498 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.091911 4810 scope.go:117] "RemoveContainer" containerID="565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.105242 4810 scope.go:117] "RemoveContainer" containerID="d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.123723 4810 scope.go:117] "RemoveContainer" containerID="79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.138135 4810 scope.go:117] "RemoveContainer" containerID="80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.155885 4810 scope.go:117] "RemoveContainer" containerID="07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.173854 4810 scope.go:117] "RemoveContainer" containerID="5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9" Dec 03 05:45:18 crc 
kubenswrapper[4810]: E1203 05:45:18.174257 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\": container with ID starting with 5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9 not found: ID does not exist" containerID="5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.174289 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9"} err="failed to get container status \"5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\": rpc error: code = NotFound desc = could not find container \"5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9\": container with ID starting with 5c2e6a178b57948bbd70fb6550fe55fc1df8bbce2f2422ed45ef6bc0471dc8b9 not found: ID does not exist" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.174312 4810 scope.go:117] "RemoveContainer" containerID="565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d" Dec 03 05:45:18 crc kubenswrapper[4810]: E1203 05:45:18.174939 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\": container with ID starting with 565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d not found: ID does not exist" containerID="565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.174971 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d"} err="failed to get container status \"565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\": rpc error: code = NotFound desc = could not find container \"565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d\": container with ID starting with 565ab1f69aae9d45cd27011ac0d2d9cfce987805bd68b7e6210f270a92d75b7d not found: ID does not exist" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.174995 4810 scope.go:117] "RemoveContainer" containerID="d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b" Dec 03 05:45:18 crc kubenswrapper[4810]: E1203 05:45:18.175269 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\": container with ID starting with d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b not found: ID does not exist" containerID="d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.175390 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b"} err="failed to get container status \"d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\": rpc error: code = NotFound desc = could not find container \"d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b\": container with ID starting with d47fcf65ad4e39773111d4b2ac6d9f67e7e42533541cfd586a0af71261058e7b not found: ID does not exist" Dec 03 05:45:18 crc kubenswrapper[4810]: 
I1203 05:45:18.175510 4810 scope.go:117] "RemoveContainer" containerID="79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49" Dec 03 05:45:18 crc kubenswrapper[4810]: E1203 05:45:18.176606 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\": container with ID starting with 79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49 not found: ID does not exist" containerID="79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.176651 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49"} err="failed to get container status \"79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\": rpc error: code = NotFound desc = could not find container \"79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49\": container with ID starting with 79c99627181c238a2cd955ea21e61036f37a8f981c797823e9992caf732b9b49 not found: ID does not exist" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.176682 4810 scope.go:117] "RemoveContainer" containerID="80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0" Dec 03 05:45:18 crc kubenswrapper[4810]: E1203 05:45:18.177065 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\": container with ID starting with 80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0 not found: ID does not exist" containerID="80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.177100 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0"} err="failed to get container status \"80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\": rpc error: code = NotFound desc = could not find container \"80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0\": container with ID starting with 80fefedb8dcd1d2775514dde993d526bf397247e37c61b07f9c005ce8a1ec4d0 not found: ID does not exist" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.177122 4810 scope.go:117] "RemoveContainer" containerID="07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218" Dec 03 05:45:18 crc kubenswrapper[4810]: E1203 05:45:18.177468 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\": container with ID starting with 07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218 not found: ID does not exist" containerID="07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.177572 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218"} err="failed to get container status \"07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\": rpc error: code = NotFound desc = could not find container \"07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218\": container 
with ID starting with 07931dceaadba2e4b9180b2fb97fb84f2594a66774765e599b6847c21bb15218 not found: ID does not exist" Dec 03 05:45:18 crc kubenswrapper[4810]: I1203 05:45:18.385419 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 03 05:45:18 crc kubenswrapper[4810]: E1203 05:45:18.869996 4810 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.23:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d9e54a4f4cb0d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 05:45:15.381050125 +0000 UTC m=+239.316510976,LastTimestamp:2025-12-03 05:45:15.381050125 +0000 UTC m=+239.316510976,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 05:45:19 crc kubenswrapper[4810]: E1203 05:45:19.775487 4810 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:19 crc kubenswrapper[4810]: E1203 05:45:19.776075 4810 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:19 crc kubenswrapper[4810]: E1203 05:45:19.776766 4810 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:19 crc kubenswrapper[4810]: E1203 05:45:19.777347 4810 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:19 crc kubenswrapper[4810]: E1203 05:45:19.777683 4810 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:19 crc kubenswrapper[4810]: I1203 05:45:19.777724 4810 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 03 05:45:19 crc kubenswrapper[4810]: E1203 05:45:19.778071 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" 
interval="200ms" Dec 03 05:45:19 crc kubenswrapper[4810]: E1203 05:45:19.980217 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="400ms" Dec 03 05:45:20 crc kubenswrapper[4810]: E1203 05:45:20.381545 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="800ms" Dec 03 05:45:21 crc kubenswrapper[4810]: E1203 05:45:21.183599 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="1.6s" Dec 03 05:45:22 crc kubenswrapper[4810]: E1203 05:45:22.784604 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="3.2s" Dec 03 05:45:25 crc kubenswrapper[4810]: E1203 05:45:25.986705 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.23:6443: connect: connection refused" interval="6.4s" Dec 03 05:45:26 crc kubenswrapper[4810]: I1203 05:45:26.380392 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:26 crc kubenswrapper[4810]: I1203 05:45:26.381210 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:27 crc kubenswrapper[4810]: I1203 05:45:27.764724 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" containerName="oauth-openshift" containerID="cri-o://2929e5132c69e48f307226eeba8936498e253f8b451b8f6e4bac51f40a12d71a" gracePeriod=15 Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.130543 4810 generic.go:334] "Generic (PLEG): container finished" podID="2f676559-93f9-4af5-9079-2d1edaa8862f" containerID="2929e5132c69e48f307226eeba8936498e253f8b451b8f6e4bac51f40a12d71a" exitCode=0 Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.130670 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" event={"ID":"2f676559-93f9-4af5-9079-2d1edaa8862f","Type":"ContainerDied","Data":"2929e5132c69e48f307226eeba8936498e253f8b451b8f6e4bac51f40a12d71a"} Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.713373 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.714486 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.715161 4810 status_manager.go:851] "Failed to get status for pod" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-nbzmx\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.715782 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.807963 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-router-certs\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.808021 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-service-ca\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.808071 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-trusted-ca-bundle\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.808103 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-session\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.808130 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-idp-0-file-data\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.808163 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-cliconfig\") pod 
\"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.808195 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-ocp-branding-template\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.808222 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-error\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809014 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-login\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809128 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6dph\" (UniqueName: \"kubernetes.io/projected/2f676559-93f9-4af5-9079-2d1edaa8862f-kube-api-access-r6dph\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809207 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-provider-selection\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809358 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-serving-cert\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809444 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-dir\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809442 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809493 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809513 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-policies\") pod \"2f676559-93f9-4af5-9079-2d1edaa8862f\" (UID: \"2f676559-93f9-4af5-9079-2d1edaa8862f\") " Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.809827 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.810388 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.810464 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.810664 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.810721 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.810783 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.810806 4810 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.810831 4810 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2f676559-93f9-4af5-9079-2d1edaa8862f-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.817427 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.817726 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.817794 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f676559-93f9-4af5-9079-2d1edaa8862f-kube-api-access-r6dph" (OuterVolumeSpecName: "kube-api-access-r6dph") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "kube-api-access-r6dph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.818120 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.818631 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.818985 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.819467 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.821170 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.821643 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "2f676559-93f9-4af5-9079-2d1edaa8862f" (UID: "2f676559-93f9-4af5-9079-2d1edaa8862f"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:45:28 crc kubenswrapper[4810]: E1203 05:45:28.871806 4810 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.23:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d9e54a4f4cb0d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 05:45:15.381050125 +0000 UTC m=+239.316510976,LastTimestamp:2025-12-03 05:45:15.381050125 +0000 UTC m=+239.316510976,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912536 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912584 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912600 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912615 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912632 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912646 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912658 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912671 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6dph\" (UniqueName: 
\"kubernetes.io/projected/2f676559-93f9-4af5-9079-2d1edaa8862f-kube-api-access-r6dph\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:28 crc kubenswrapper[4810]: I1203 05:45:28.912685 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2f676559-93f9-4af5-9079-2d1edaa8862f-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.138352 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.138574 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" event={"ID":"2f676559-93f9-4af5-9079-2d1edaa8862f","Type":"ContainerDied","Data":"d7a7ec13cf6d510bb5f25ae28a8ff6ff3fd3f71f0e7a88137154ee0971b16e7f"} Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.139003 4810 scope.go:117] "RemoveContainer" containerID="2929e5132c69e48f307226eeba8936498e253f8b451b8f6e4bac51f40a12d71a" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.139608 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.140054 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.140713 4810 status_manager.go:851] "Failed to get status for pod" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-nbzmx\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.141203 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.141236 4810 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d" exitCode=1 Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.141256 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d"} Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.141556 4810 scope.go:117] "RemoveContainer" containerID="b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.142668 4810 status_manager.go:851] "Failed to get status for pod" 
podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.143131 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.143345 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.143549 4810 status_manager.go:851] "Failed to get status for pod" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-nbzmx\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.159567 4810 status_manager.go:851] "Failed to get status for pod" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-nbzmx\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.160303 4810 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.160857 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.161341 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.376832 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.377542 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.378069 4810 status_manager.go:851] "Failed to get status for pod" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-nbzmx\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.378418 4810 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.378676 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.390451 4810 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b006771a-0092-4cc6-bd30-77a421020e10" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.390481 4810 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b006771a-0092-4cc6-bd30-77a421020e10" Dec 03 05:45:29 crc kubenswrapper[4810]: E1203 05:45:29.390949 4810 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:29 crc kubenswrapper[4810]: I1203 05:45:29.391621 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:29 crc kubenswrapper[4810]: W1203 05:45:29.406878 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-ebaa25adf13210a526713103162067b598c3e185f229e47b63417fa83abbe0b0 WatchSource:0}: Error finding container ebaa25adf13210a526713103162067b598c3e185f229e47b63417fa83abbe0b0: Status 404 returned error can't find the container with id ebaa25adf13210a526713103162067b598c3e185f229e47b63417fa83abbe0b0 Dec 03 05:45:29 crc kubenswrapper[4810]: E1203 05:45:29.460608 4810 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.23:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" volumeName="registry-storage" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.152976 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.153433 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"01098c882742c12fb6d8b9bf90d8124791ca772003134ce94d35a9b5ab3a3f20"} Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.154486 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.155037 4810 status_manager.go:851] "Failed to get status for pod" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-nbzmx\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.155436 4810 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.155683 4810 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="57e8289d02424144dc519a9f913f804c1601868de5912379e985caab2c718ed1" exitCode=0 Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.155707 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"57e8289d02424144dc519a9f913f804c1601868de5912379e985caab2c718ed1"} Dec 03 05:45:30 crc 
kubenswrapper[4810]: I1203 05:45:30.155765 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ebaa25adf13210a526713103162067b598c3e185f229e47b63417fa83abbe0b0"} Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.155806 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.156121 4810 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b006771a-0092-4cc6-bd30-77a421020e10" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.156164 4810 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b006771a-0092-4cc6-bd30-77a421020e10" Dec 03 05:45:30 crc kubenswrapper[4810]: E1203 05:45:30.156487 4810 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.156899 4810 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.157340 4810 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.157646 4810 status_manager.go:851] "Failed to get status for pod" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:30 crc kubenswrapper[4810]: I1203 05:45:30.158024 4810 status_manager.go:851] "Failed to get status for pod" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" pod="openshift-authentication/oauth-openshift-558db77b4-nbzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-nbzmx\": dial tcp 38.102.83.23:6443: connect: connection refused" Dec 03 05:45:31 crc kubenswrapper[4810]: I1203 05:45:31.164128 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d8ced1ec6acf19a0d92e2ad67dcd987f4005a90ddc0488d8418f5cfdaba2b61d"} Dec 03 05:45:31 crc kubenswrapper[4810]: I1203 05:45:31.164619 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6db22d630ca120c3c7cbf94d380e7c648a154a41437988a26470b1e14212b32d"} Dec 03 05:45:31 crc kubenswrapper[4810]: I1203 05:45:31.164631 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b4dbb8e0d9926c196653942103fb9753c3cff99cac725d3dd3d905946e687a8f"} Dec 03 05:45:31 crc kubenswrapper[4810]: I1203 05:45:31.164644 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bcda494ccf66dae9375300ade9451d8b16653c546349b8b2e7dc3b0239fedc07"} Dec 03 05:45:32 crc kubenswrapper[4810]: I1203 05:45:32.172862 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8b0a1bb5243fcbe76b12b719d10ba894246c1df459754378003fcd393203804e"} Dec 03 05:45:32 crc kubenswrapper[4810]: I1203 05:45:32.173243 4810 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b006771a-0092-4cc6-bd30-77a421020e10" Dec 03 05:45:32 crc kubenswrapper[4810]: I1203 05:45:32.173258 4810 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b006771a-0092-4cc6-bd30-77a421020e10" Dec 03 05:45:32 crc kubenswrapper[4810]: I1203 05:45:32.173389 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:34 crc kubenswrapper[4810]: I1203 05:45:34.392182 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:34 crc kubenswrapper[4810]: I1203 05:45:34.392436 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:34 crc kubenswrapper[4810]: I1203 05:45:34.400111 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:35 crc kubenswrapper[4810]: I1203 05:45:35.334043 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:45:35 crc kubenswrapper[4810]: I1203 05:45:35.334394 4810 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 05:45:35 crc kubenswrapper[4810]: I1203 05:45:35.334491 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 05:45:37 crc kubenswrapper[4810]: I1203 05:45:37.182403 4810 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:37 crc kubenswrapper[4810]: I1203 05:45:37.281654 4810 status_manager.go:861] "Pod 
was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="22324bd0-10ba-4033-bda4-d29f12fdfbd3" Dec 03 05:45:37 crc kubenswrapper[4810]: I1203 05:45:37.579220 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:45:38 crc kubenswrapper[4810]: I1203 05:45:38.212099 4810 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b006771a-0092-4cc6-bd30-77a421020e10" Dec 03 05:45:38 crc kubenswrapper[4810]: I1203 05:45:38.212149 4810 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b006771a-0092-4cc6-bd30-77a421020e10" Dec 03 05:45:38 crc kubenswrapper[4810]: I1203 05:45:38.214886 4810 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="22324bd0-10ba-4033-bda4-d29f12fdfbd3" Dec 03 05:45:45 crc kubenswrapper[4810]: I1203 05:45:45.334275 4810 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 05:45:45 crc kubenswrapper[4810]: I1203 05:45:45.335024 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 05:45:46 crc kubenswrapper[4810]: I1203 05:45:46.345497 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 05:45:46 crc kubenswrapper[4810]: I1203 05:45:46.593033 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 05:45:47 crc kubenswrapper[4810]: I1203 05:45:47.047252 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 05:45:47 crc kubenswrapper[4810]: I1203 05:45:47.167572 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 05:45:47 crc kubenswrapper[4810]: I1203 05:45:47.915882 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.009021 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.065973 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.081471 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.212852 4810 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 05:45:48 crc 
kubenswrapper[4810]: I1203 05:45:48.215630 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=33.215610637 podStartE2EDuration="33.215610637s" podCreationTimestamp="2025-12-03 05:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:45:37.24878037 +0000 UTC m=+261.184241221" watchObservedRunningTime="2025-12-03 05:45:48.215610637 +0000 UTC m=+272.151071478" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.217718 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-nbzmx"] Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.217794 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.223970 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.225015 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.238506 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=11.238486074 podStartE2EDuration="11.238486074s" podCreationTimestamp="2025-12-03 05:45:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:45:48.237398164 +0000 UTC m=+272.172859015" watchObservedRunningTime="2025-12-03 05:45:48.238486074 +0000 UTC m=+272.173946905" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.384166 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" path="/var/lib/kubelet/pods/2f676559-93f9-4af5-9079-2d1edaa8862f/volumes" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.436243 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.527251 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.651275 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 05:45:48 crc kubenswrapper[4810]: I1203 05:45:48.972509 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.040326 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.228226 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.250291 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.277228 4810 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.343356 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.462184 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.481033 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.509438 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.512782 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.516525 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.650678 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.781031 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.782234 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.811392 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.824685 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.848240 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 05:45:49 crc kubenswrapper[4810]: I1203 05:45:49.923241 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.107857 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.180417 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.292716 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.300216 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.540172 4810 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.648816 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.668869 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.669803 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.690500 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.751206 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.768033 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.809340 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.857559 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 05:45:50 crc kubenswrapper[4810]: I1203 05:45:50.949289 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.002216 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.060136 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.064176 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.080502 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.123032 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.151920 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.164276 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.181227 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.191777 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.414825 4810 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.445170 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.610630 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.672302 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.904858 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 05:45:51 crc kubenswrapper[4810]: I1203 05:45:51.943243 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.113525 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.177646 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.209363 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.309138 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.408999 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.442477 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.466409 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.486994 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.494646 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.510019 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.630383 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.682170 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.752030 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 
05:45:52.810144 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.821491 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.885023 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.937082 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 05:45:52 crc kubenswrapper[4810]: I1203 05:45:52.943948 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.092566 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.105130 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.165311 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.221426 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.332463 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.509296 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.522843 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.547222 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.619542 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.619566 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.639511 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.645810 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.679809 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.725593 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 
05:45:53.871437 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.892644 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 05:45:53 crc kubenswrapper[4810]: I1203 05:45:53.940023 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.080136 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.188822 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.193466 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-56c748df47-29v8s"] Dec 03 05:45:54 crc kubenswrapper[4810]: E1203 05:45:54.193672 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" containerName="oauth-openshift" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.193684 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" containerName="oauth-openshift" Dec 03 05:45:54 crc kubenswrapper[4810]: E1203 05:45:54.193698 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" containerName="installer" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.193704 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" containerName="installer" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.193809 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cd24987-2a4f-406c-a597-95ddf6ad7908" containerName="installer" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.193824 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f676559-93f9-4af5-9079-2d1edaa8862f" containerName="oauth-openshift" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.194173 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.198214 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.200778 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.200919 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.200980 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.201048 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.200924 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.201274 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.201510 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.201656 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.201813 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.202019 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.202287 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.202440 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.206286 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.209938 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-56c748df47-29v8s"] Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.212644 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.222335 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.235381 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/bb058d8f-da85-42fc-9f93-2a457a3aca88-audit-dir\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.235434 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-cliconfig\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.235500 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-router-certs\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.235743 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-serving-cert\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.235858 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-session\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.235942 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-service-ca\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.235986 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-audit-policies\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337376 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-serving-cert\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337453 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337487 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337511 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzhdx\" (UniqueName: \"kubernetes.io/projected/bb058d8f-da85-42fc-9f93-2a457a3aca88-kube-api-access-vzhdx\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337540 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-session\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337569 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-login\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337609 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337645 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-service-ca\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337672 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-audit-policies\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337699 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bb058d8f-da85-42fc-9f93-2a457a3aca88-audit-dir\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337723 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-cliconfig\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337778 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-error\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337820 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-router-certs\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.337859 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.339192 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bb058d8f-da85-42fc-9f93-2a457a3aca88-audit-dir\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.340143 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-cliconfig\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.340369 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-audit-policies\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.340564 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-service-ca\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.344596 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-session\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.344632 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-serving-cert\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.345613 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-router-certs\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.356481 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.439332 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.439884 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.439913 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.439939 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzhdx\" (UniqueName: \"kubernetes.io/projected/bb058d8f-da85-42fc-9f93-2a457a3aca88-kube-api-access-vzhdx\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc 
kubenswrapper[4810]: I1203 05:45:54.439964 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-login\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.439999 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.440052 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-error\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.442204 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.445214 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.447428 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-login\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.448918 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-error\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.457503 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 
05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.461224 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bb058d8f-da85-42fc-9f93-2a457a3aca88-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.466165 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzhdx\" (UniqueName: \"kubernetes.io/projected/bb058d8f-da85-42fc-9f93-2a457a3aca88-kube-api-access-vzhdx\") pod \"oauth-openshift-56c748df47-29v8s\" (UID: \"bb058d8f-da85-42fc-9f93-2a457a3aca88\") " pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.504382 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.524236 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.555748 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.679820 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.749132 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.812182 4810 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 05:45:54 crc kubenswrapper[4810]: I1203 05:45:54.922925 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.007014 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.021825 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.063990 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.083716 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.118529 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.192640 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.267541 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.301934 4810 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.334404 4810 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.334511 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.334599 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.335785 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"01098c882742c12fb6d8b9bf90d8124791ca772003134ce94d35a9b5ab3a3f20"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.336004 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://01098c882742c12fb6d8b9bf90d8124791ca772003134ce94d35a9b5ab3a3f20" gracePeriod=30 Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.343933 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.370589 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.658466 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.671427 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.677008 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.690699 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.697523 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.788980 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.842954 4810 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.907580 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.947703 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 05:45:55 crc kubenswrapper[4810]: I1203 05:45:55.954322 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.051104 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.053272 4810 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.190584 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.268089 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.340810 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.385495 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.436207 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.545110 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.651761 4810 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.727525 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.728943 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 05:45:56 crc kubenswrapper[4810]: I1203 05:45:56.816154 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.051047 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.079632 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.154187 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.172711 4810 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.191624 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.217215 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.281125 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.314056 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.320361 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.332321 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.362180 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.408711 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: E1203 05:45:57.429499 4810 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 03 05:45:57 crc kubenswrapper[4810]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-56c748df47-29v8s_openshift-authentication_bb058d8f-da85-42fc-9f93-2a457a3aca88_0(1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1): error adding pod openshift-authentication_oauth-openshift-56c748df47-29v8s to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1" Netns:"/var/run/netns/4945c936-81cb-427f-b1f4-5c886570bfa1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-56c748df47-29v8s;K8S_POD_INFRA_CONTAINER_ID=1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1;K8S_POD_UID=bb058d8f-da85-42fc-9f93-2a457a3aca88" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-56c748df47-29v8s] networking: Multus: [openshift-authentication/oauth-openshift-56c748df47-29v8s/bb058d8f-da85-42fc-9f93-2a457a3aca88]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-56c748df47-29v8s in out of cluster comm: pod "oauth-openshift-56c748df47-29v8s" not found Dec 03 05:45:57 crc kubenswrapper[4810]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 03 05:45:57 crc kubenswrapper[4810]: > Dec 03 05:45:57 crc kubenswrapper[4810]: E1203 05:45:57.429584 4810 
kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 03 05:45:57 crc kubenswrapper[4810]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-56c748df47-29v8s_openshift-authentication_bb058d8f-da85-42fc-9f93-2a457a3aca88_0(1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1): error adding pod openshift-authentication_oauth-openshift-56c748df47-29v8s to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1" Netns:"/var/run/netns/4945c936-81cb-427f-b1f4-5c886570bfa1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-56c748df47-29v8s;K8S_POD_INFRA_CONTAINER_ID=1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1;K8S_POD_UID=bb058d8f-da85-42fc-9f93-2a457a3aca88" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-56c748df47-29v8s] networking: Multus: [openshift-authentication/oauth-openshift-56c748df47-29v8s/bb058d8f-da85-42fc-9f93-2a457a3aca88]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-56c748df47-29v8s in out of cluster comm: pod "oauth-openshift-56c748df47-29v8s" not found Dec 03 05:45:57 crc kubenswrapper[4810]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 03 05:45:57 crc kubenswrapper[4810]: > pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:57 crc kubenswrapper[4810]: E1203 05:45:57.429613 4810 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 03 05:45:57 crc kubenswrapper[4810]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-56c748df47-29v8s_openshift-authentication_bb058d8f-da85-42fc-9f93-2a457a3aca88_0(1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1): error adding pod openshift-authentication_oauth-openshift-56c748df47-29v8s to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1" Netns:"/var/run/netns/4945c936-81cb-427f-b1f4-5c886570bfa1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-56c748df47-29v8s;K8S_POD_INFRA_CONTAINER_ID=1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1;K8S_POD_UID=bb058d8f-da85-42fc-9f93-2a457a3aca88" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-56c748df47-29v8s] networking: Multus: [openshift-authentication/oauth-openshift-56c748df47-29v8s/bb058d8f-da85-42fc-9f93-2a457a3aca88]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-56c748df47-29v8s in out of cluster comm: pod "oauth-openshift-56c748df47-29v8s" not found Dec 03 05:45:57 crc kubenswrapper[4810]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 03 05:45:57 crc kubenswrapper[4810]: > pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:57 crc kubenswrapper[4810]: E1203 05:45:57.429674 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"oauth-openshift-56c748df47-29v8s_openshift-authentication(bb058d8f-da85-42fc-9f93-2a457a3aca88)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"oauth-openshift-56c748df47-29v8s_openshift-authentication(bb058d8f-da85-42fc-9f93-2a457a3aca88)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-56c748df47-29v8s_openshift-authentication_bb058d8f-da85-42fc-9f93-2a457a3aca88_0(1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1): error adding pod openshift-authentication_oauth-openshift-56c748df47-29v8s to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1\\\" Netns:\\\"/var/run/netns/4945c936-81cb-427f-b1f4-5c886570bfa1\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-56c748df47-29v8s;K8S_POD_INFRA_CONTAINER_ID=1a2637edb974f0dab29ee0e161ef080a96ba7e3e10da560069d521e7f329e1e1;K8S_POD_UID=bb058d8f-da85-42fc-9f93-2a457a3aca88\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-56c748df47-29v8s] networking: Multus: [openshift-authentication/oauth-openshift-56c748df47-29v8s/bb058d8f-da85-42fc-9f93-2a457a3aca88]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-56c748df47-29v8s in out of cluster comm: pod \\\"oauth-openshift-56c748df47-29v8s\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" podUID="bb058d8f-da85-42fc-9f93-2a457a3aca88" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.450472 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.470364 4810 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.515179 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.564470 4810 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console"/"console-oauth-config" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.627685 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.642488 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.815960 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.846099 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.856238 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.892929 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.899909 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.928199 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 05:45:57 crc kubenswrapper[4810]: I1203 05:45:57.928933 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.011924 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.054714 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.094709 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.104206 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.128654 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.161059 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.188588 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.335885 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.336560 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.461205 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.622035 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.730332 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.752081 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.876162 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.881986 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.889949 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.904232 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.922322 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 05:45:58 crc kubenswrapper[4810]: I1203 05:45:58.981721 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.142274 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-56c748df47-29v8s"] Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.157634 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.188600 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.226774 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.234426 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.243472 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.333697 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.335918 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 
05:45:59.345968 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" event={"ID":"bb058d8f-da85-42fc-9f93-2a457a3aca88","Type":"ContainerStarted","Data":"f332d462d1cab527f16d0494fa1cea7ad89df2a64c71106c67a3f0837138b915"} Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.358040 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.411939 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.432428 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.494748 4810 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.495049 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045" gracePeriod=5 Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.508615 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.533525 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.632205 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.709090 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.776334 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.830376 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 05:45:59 crc kubenswrapper[4810]: I1203 05:45:59.991990 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.017299 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.032577 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.294404 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.353380 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" 
event={"ID":"bb058d8f-da85-42fc-9f93-2a457a3aca88","Type":"ContainerStarted","Data":"50bbd338d946e2a9a7b8f067afc0b26ff1b703c6f46f1328866ef1641e60264f"} Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.353814 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.358417 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.372063 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-56c748df47-29v8s" podStartSLOduration=58.372042742 podStartE2EDuration="58.372042742s" podCreationTimestamp="2025-12-03 05:45:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:46:00.370292134 +0000 UTC m=+284.305752995" watchObservedRunningTime="2025-12-03 05:46:00.372042742 +0000 UTC m=+284.307503583" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.379383 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.425533 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.527681 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.528625 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.542989 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.542990 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.655470 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.746561 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.758805 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.778466 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.800574 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.812521 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.853057 4810 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 05:46:00 crc kubenswrapper[4810]: I1203 05:46:00.992449 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.158314 4810 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.159194 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.332753 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.363313 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.396152 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.401993 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.445406 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.485539 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.770078 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.774286 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.780644 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.879664 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.925184 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 05:46:01 crc kubenswrapper[4810]: I1203 05:46:01.974465 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 05:46:02 crc kubenswrapper[4810]: I1203 05:46:02.069679 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 05:46:02 crc kubenswrapper[4810]: I1203 05:46:02.409809 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 05:46:02 crc kubenswrapper[4810]: I1203 05:46:02.439125 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 05:46:02 crc kubenswrapper[4810]: I1203 05:46:02.453873 4810 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 05:46:02 crc kubenswrapper[4810]: I1203 05:46:02.524873 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 05:46:02 crc kubenswrapper[4810]: I1203 05:46:02.748612 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 05:46:02 crc kubenswrapper[4810]: I1203 05:46:02.804388 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 05:46:02 crc kubenswrapper[4810]: I1203 05:46:02.857573 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 05:46:03 crc kubenswrapper[4810]: I1203 05:46:03.344506 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.096970 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.097424 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190086 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190156 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190209 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190237 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190289 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190460 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190546 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190629 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.190661 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.191122 4810 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.191166 4810 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.191190 4810 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.191212 4810 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.198318 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.292010 4810 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.386894 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.386997 4810 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045" exitCode=137 Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.387097 4810 scope.go:117] "RemoveContainer" containerID="ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.387114 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.410823 4810 scope.go:117] "RemoveContainer" containerID="ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045" Dec 03 05:46:05 crc kubenswrapper[4810]: E1203 05:46:05.411291 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045\": container with ID starting with ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045 not found: ID does not exist" containerID="ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045" Dec 03 05:46:05 crc kubenswrapper[4810]: I1203 05:46:05.411337 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045"} err="failed to get container status \"ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045\": rpc error: code = NotFound desc = could not find container \"ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045\": container with ID starting with ec7601a74a078189b256aa3616d044c7a2f49bb3cae63536cdef6303dd69c045 not found: ID does not exist" Dec 03 05:46:06 crc kubenswrapper[4810]: I1203 05:46:06.383802 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 03 05:46:06 crc kubenswrapper[4810]: I1203 05:46:06.384122 4810 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 03 05:46:06 crc kubenswrapper[4810]: I1203 05:46:06.399429 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 05:46:06 crc kubenswrapper[4810]: I1203 05:46:06.399472 4810 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="60218789-6046-498c-b474-f9aa91dd0fe4" Dec 03 05:46:06 crc kubenswrapper[4810]: I1203 05:46:06.405429 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 05:46:06 crc kubenswrapper[4810]: I1203 
05:46:06.405495 4810 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="60218789-6046-498c-b474-f9aa91dd0fe4" Dec 03 05:46:16 crc kubenswrapper[4810]: I1203 05:46:16.222644 4810 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 03 05:46:21 crc kubenswrapper[4810]: I1203 05:46:21.498554 4810 generic.go:334] "Generic (PLEG): container finished" podID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerID="ea14a6044b5073a9ce04946750bf9ae58f057206e2ddfb8f0b135d0f35a2a678" exitCode=0 Dec 03 05:46:21 crc kubenswrapper[4810]: I1203 05:46:21.498711 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" event={"ID":"16fa1024-0d69-4a97-8b3f-172a6591c81a","Type":"ContainerDied","Data":"ea14a6044b5073a9ce04946750bf9ae58f057206e2ddfb8f0b135d0f35a2a678"} Dec 03 05:46:21 crc kubenswrapper[4810]: I1203 05:46:21.500534 4810 scope.go:117] "RemoveContainer" containerID="ea14a6044b5073a9ce04946750bf9ae58f057206e2ddfb8f0b135d0f35a2a678" Dec 03 05:46:22 crc kubenswrapper[4810]: I1203 05:46:22.195212 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 05:46:22 crc kubenswrapper[4810]: I1203 05:46:22.506759 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" event={"ID":"16fa1024-0d69-4a97-8b3f-172a6591c81a","Type":"ContainerStarted","Data":"65aaef5133d0e2599f4f6fa4de9a103d69088a5aa4cac9c6ec5cffed9de135fd"} Dec 03 05:46:22 crc kubenswrapper[4810]: I1203 05:46:22.507227 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:46:22 crc kubenswrapper[4810]: I1203 05:46:22.509287 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:46:22 crc kubenswrapper[4810]: I1203 05:46:22.558514 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 05:46:25 crc kubenswrapper[4810]: I1203 05:46:25.523290 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 03 05:46:25 crc kubenswrapper[4810]: I1203 05:46:25.525997 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 05:46:25 crc kubenswrapper[4810]: I1203 05:46:25.526055 4810 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="01098c882742c12fb6d8b9bf90d8124791ca772003134ce94d35a9b5ab3a3f20" exitCode=137 Dec 03 05:46:25 crc kubenswrapper[4810]: I1203 05:46:25.526087 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"01098c882742c12fb6d8b9bf90d8124791ca772003134ce94d35a9b5ab3a3f20"} Dec 03 05:46:25 crc kubenswrapper[4810]: I1203 05:46:25.526120 4810 scope.go:117] "RemoveContainer" 
containerID="b70ed24022fce521ed132b45ef0afaf78b92de4977dc0594996b5580f9e6959d" Dec 03 05:46:26 crc kubenswrapper[4810]: I1203 05:46:26.533146 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 03 05:46:26 crc kubenswrapper[4810]: I1203 05:46:26.534626 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"40a0dbad4bf5d97bc6752c2cc716d1855d8958bd207a165ef09282f7cf9b8d68"} Dec 03 05:46:27 crc kubenswrapper[4810]: I1203 05:46:27.578846 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:46:35 crc kubenswrapper[4810]: I1203 05:46:35.334118 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:46:35 crc kubenswrapper[4810]: I1203 05:46:35.344911 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:46:37 crc kubenswrapper[4810]: I1203 05:46:37.584769 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.015139 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-djbsd"] Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.015827 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" podUID="c34a2d47-0bc4-4100-bd82-d2bf8e571129" containerName="controller-manager" containerID="cri-o://526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83" gracePeriod=30 Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.089337 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd"] Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.089553 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" podUID="373c7218-dd5b-411d-bf82-94d13f4ca81a" containerName="route-controller-manager" containerID="cri-o://0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f" gracePeriod=30 Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.365636 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.479627 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.549686 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tm65\" (UniqueName: \"kubernetes.io/projected/c34a2d47-0bc4-4100-bd82-d2bf8e571129-kube-api-access-5tm65\") pod \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.549769 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c34a2d47-0bc4-4100-bd82-d2bf8e571129-serving-cert\") pod \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.549798 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-config\") pod \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.549834 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-client-ca\") pod \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.549915 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-proxy-ca-bundles\") pod \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\" (UID: \"c34a2d47-0bc4-4100-bd82-d2bf8e571129\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.550987 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-client-ca" (OuterVolumeSpecName: "client-ca") pod "c34a2d47-0bc4-4100-bd82-d2bf8e571129" (UID: "c34a2d47-0bc4-4100-bd82-d2bf8e571129"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.551118 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-config" (OuterVolumeSpecName: "config") pod "c34a2d47-0bc4-4100-bd82-d2bf8e571129" (UID: "c34a2d47-0bc4-4100-bd82-d2bf8e571129"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.551128 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c34a2d47-0bc4-4100-bd82-d2bf8e571129" (UID: "c34a2d47-0bc4-4100-bd82-d2bf8e571129"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.551678 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.551698 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.551726 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c34a2d47-0bc4-4100-bd82-d2bf8e571129-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.557283 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c34a2d47-0bc4-4100-bd82-d2bf8e571129-kube-api-access-5tm65" (OuterVolumeSpecName: "kube-api-access-5tm65") pod "c34a2d47-0bc4-4100-bd82-d2bf8e571129" (UID: "c34a2d47-0bc4-4100-bd82-d2bf8e571129"). InnerVolumeSpecName "kube-api-access-5tm65". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.563239 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c34a2d47-0bc4-4100-bd82-d2bf8e571129-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c34a2d47-0bc4-4100-bd82-d2bf8e571129" (UID: "c34a2d47-0bc4-4100-bd82-d2bf8e571129"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.642970 4810 generic.go:334] "Generic (PLEG): container finished" podID="c34a2d47-0bc4-4100-bd82-d2bf8e571129" containerID="526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83" exitCode=0 Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.643057 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" event={"ID":"c34a2d47-0bc4-4100-bd82-d2bf8e571129","Type":"ContainerDied","Data":"526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83"} Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.643078 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.643104 4810 scope.go:117] "RemoveContainer" containerID="526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.643088 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-djbsd" event={"ID":"c34a2d47-0bc4-4100-bd82-d2bf8e571129","Type":"ContainerDied","Data":"75ce807d7eb0e57a913560a8a9e8955867b5275af0f542692e5e7c7ad598010f"} Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.644464 4810 generic.go:334] "Generic (PLEG): container finished" podID="373c7218-dd5b-411d-bf82-94d13f4ca81a" containerID="0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f" exitCode=0 Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.644499 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" event={"ID":"373c7218-dd5b-411d-bf82-94d13f4ca81a","Type":"ContainerDied","Data":"0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f"} Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.644522 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" event={"ID":"373c7218-dd5b-411d-bf82-94d13f4ca81a","Type":"ContainerDied","Data":"3755d609aefd01d6f7d92e1d73d81795485846bc67b5eb326704e2b8202075ed"} Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.644577 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.652202 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkjt7\" (UniqueName: \"kubernetes.io/projected/373c7218-dd5b-411d-bf82-94d13f4ca81a-kube-api-access-xkjt7\") pod \"373c7218-dd5b-411d-bf82-94d13f4ca81a\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.652380 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-config\") pod \"373c7218-dd5b-411d-bf82-94d13f4ca81a\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.652486 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-client-ca\") pod \"373c7218-dd5b-411d-bf82-94d13f4ca81a\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.652628 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/373c7218-dd5b-411d-bf82-94d13f4ca81a-serving-cert\") pod \"373c7218-dd5b-411d-bf82-94d13f4ca81a\" (UID: \"373c7218-dd5b-411d-bf82-94d13f4ca81a\") " Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.653089 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tm65\" (UniqueName: \"kubernetes.io/projected/c34a2d47-0bc4-4100-bd82-d2bf8e571129-kube-api-access-5tm65\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.653201 4810 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c34a2d47-0bc4-4100-bd82-d2bf8e571129-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.653708 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-config" (OuterVolumeSpecName: "config") pod "373c7218-dd5b-411d-bf82-94d13f4ca81a" (UID: "373c7218-dd5b-411d-bf82-94d13f4ca81a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.653819 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-client-ca" (OuterVolumeSpecName: "client-ca") pod "373c7218-dd5b-411d-bf82-94d13f4ca81a" (UID: "373c7218-dd5b-411d-bf82-94d13f4ca81a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.655119 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/373c7218-dd5b-411d-bf82-94d13f4ca81a-kube-api-access-xkjt7" (OuterVolumeSpecName: "kube-api-access-xkjt7") pod "373c7218-dd5b-411d-bf82-94d13f4ca81a" (UID: "373c7218-dd5b-411d-bf82-94d13f4ca81a"). InnerVolumeSpecName "kube-api-access-xkjt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.658078 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/373c7218-dd5b-411d-bf82-94d13f4ca81a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "373c7218-dd5b-411d-bf82-94d13f4ca81a" (UID: "373c7218-dd5b-411d-bf82-94d13f4ca81a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.665797 4810 scope.go:117] "RemoveContainer" containerID="526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83" Dec 03 05:46:44 crc kubenswrapper[4810]: E1203 05:46:44.666988 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83\": container with ID starting with 526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83 not found: ID does not exist" containerID="526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.667044 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83"} err="failed to get container status \"526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83\": rpc error: code = NotFound desc = could not find container \"526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83\": container with ID starting with 526140f929a970ab4c15074937365e6a8572f22d6046c1656e007f8d19819e83 not found: ID does not exist" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.667081 4810 scope.go:117] "RemoveContainer" containerID="0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.669547 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-djbsd"] Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.673385 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-djbsd"] Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.684201 4810 scope.go:117] "RemoveContainer" containerID="0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f" Dec 03 05:46:44 crc kubenswrapper[4810]: E1203 05:46:44.685288 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f\": container with ID starting with 0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f not found: ID does not exist" containerID="0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.685420 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f"} err="failed to get container status \"0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f\": rpc error: code = NotFound desc = could not find container \"0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f\": container with ID starting with 0f7663957dff426c9e68c1674fde5fca45a2c5653be78e224ea8c8352b66c33f not found: ID does not exist" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.754868 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.754908 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/373c7218-dd5b-411d-bf82-94d13f4ca81a-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.754921 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/373c7218-dd5b-411d-bf82-94d13f4ca81a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.754932 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkjt7\" (UniqueName: \"kubernetes.io/projected/373c7218-dd5b-411d-bf82-94d13f4ca81a-kube-api-access-xkjt7\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.968014 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd"] Dec 03 05:46:44 crc kubenswrapper[4810]: I1203 05:46:44.971080 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dlmpd"] Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.025976 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-76b976544-dn7ph"] Dec 03 05:46:46 crc kubenswrapper[4810]: E1203 05:46:46.026725 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.026764 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 05:46:46 crc kubenswrapper[4810]: E1203 05:46:46.026786 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c34a2d47-0bc4-4100-bd82-d2bf8e571129" containerName="controller-manager" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.026796 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c34a2d47-0bc4-4100-bd82-d2bf8e571129" containerName="controller-manager" Dec 03 05:46:46 crc kubenswrapper[4810]: E1203 05:46:46.026814 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="373c7218-dd5b-411d-bf82-94d13f4ca81a" containerName="route-controller-manager" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.026823 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="373c7218-dd5b-411d-bf82-94d13f4ca81a" containerName="route-controller-manager" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.026954 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="c34a2d47-0bc4-4100-bd82-d2bf8e571129" containerName="controller-manager" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.026975 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.026990 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="373c7218-dd5b-411d-bf82-94d13f4ca81a" containerName="route-controller-manager" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.027512 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.029410 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l"] Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.030120 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.032108 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.032245 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.032449 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.033441 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.033842 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.033844 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.033991 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.034054 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.034060 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.034126 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.034236 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.038302 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.041237 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.047415 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l"] Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.052518 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-76b976544-dn7ph"] Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.175892 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tls5c\" (UniqueName: 
\"kubernetes.io/projected/a4a99527-0752-41a1-a601-eefe01a9c331-kube-api-access-tls5c\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.175949 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-client-ca\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.175984 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmfjb\" (UniqueName: \"kubernetes.io/projected/f4538213-5bab-4f60-8b52-fc7f02145914-kube-api-access-jmfjb\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.176007 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-proxy-ca-bundles\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.176037 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-config\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.176055 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-client-ca\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.176071 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-config\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.176198 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4a99527-0752-41a1-a601-eefe01a9c331-serving-cert\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.176245 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f4538213-5bab-4f60-8b52-fc7f02145914-serving-cert\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.277380 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tls5c\" (UniqueName: \"kubernetes.io/projected/a4a99527-0752-41a1-a601-eefe01a9c331-kube-api-access-tls5c\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.277947 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-client-ca\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.278077 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmfjb\" (UniqueName: \"kubernetes.io/projected/f4538213-5bab-4f60-8b52-fc7f02145914-kube-api-access-jmfjb\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.278165 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-proxy-ca-bundles\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.278252 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-config\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.278335 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-client-ca\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.278447 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-config\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.278561 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4a99527-0752-41a1-a601-eefe01a9c331-serving-cert\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: 
\"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.278691 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4538213-5bab-4f60-8b52-fc7f02145914-serving-cert\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.279303 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-client-ca\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.279346 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-proxy-ca-bundles\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.279594 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-config\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.281317 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-client-ca\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.282869 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4538213-5bab-4f60-8b52-fc7f02145914-serving-cert\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.283040 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4a99527-0752-41a1-a601-eefe01a9c331-serving-cert\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.283037 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-config\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.294638 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jmfjb\" (UniqueName: \"kubernetes.io/projected/f4538213-5bab-4f60-8b52-fc7f02145914-kube-api-access-jmfjb\") pod \"controller-manager-76b976544-dn7ph\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.296178 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tls5c\" (UniqueName: \"kubernetes.io/projected/a4a99527-0752-41a1-a601-eefe01a9c331-kube-api-access-tls5c\") pod \"route-controller-manager-8d8c54746-k4d8l\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.369123 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.378327 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.385756 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="373c7218-dd5b-411d-bf82-94d13f4ca81a" path="/var/lib/kubelet/pods/373c7218-dd5b-411d-bf82-94d13f4ca81a/volumes" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.386494 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c34a2d47-0bc4-4100-bd82-d2bf8e571129" path="/var/lib/kubelet/pods/c34a2d47-0bc4-4100-bd82-d2bf8e571129/volumes" Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.481316 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-76b976544-dn7ph"] Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.494754 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l"] Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.637642 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-76b976544-dn7ph"] Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.660271 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" event={"ID":"f4538213-5bab-4f60-8b52-fc7f02145914","Type":"ContainerStarted","Data":"1d6705dc4d9d598c95f5a2ef539f8e50643c5d2b187516c3316cc9256add0c54"} Dec 03 05:46:46 crc kubenswrapper[4810]: I1203 05:46:46.690615 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l"] Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.670338 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" podUID="f4538213-5bab-4f60-8b52-fc7f02145914" containerName="controller-manager" containerID="cri-o://7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699" gracePeriod=30 Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.670359 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" event={"ID":"f4538213-5bab-4f60-8b52-fc7f02145914","Type":"ContainerStarted","Data":"7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699"} Dec 03 05:46:47 crc 
kubenswrapper[4810]: I1203 05:46:47.676371 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" podUID="a4a99527-0752-41a1-a601-eefe01a9c331" containerName="route-controller-manager" containerID="cri-o://5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a" gracePeriod=30 Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.677848 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" event={"ID":"a4a99527-0752-41a1-a601-eefe01a9c331","Type":"ContainerStarted","Data":"5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a"} Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.677888 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" event={"ID":"a4a99527-0752-41a1-a601-eefe01a9c331","Type":"ContainerStarted","Data":"71094eab00b6dc233d34093b2a0a28bb98e15cd581f48c8d4fcc38d623c109d5"} Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.677912 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.677926 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.677981 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.682608 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.701371 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" podStartSLOduration=3.701335695 podStartE2EDuration="3.701335695s" podCreationTimestamp="2025-12-03 05:46:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:46:47.697290336 +0000 UTC m=+331.632751177" watchObservedRunningTime="2025-12-03 05:46:47.701335695 +0000 UTC m=+331.636796566" Dec 03 05:46:47 crc kubenswrapper[4810]: I1203 05:46:47.736414 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" podStartSLOduration=3.736392153 podStartE2EDuration="3.736392153s" podCreationTimestamp="2025-12-03 05:46:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:46:47.724906262 +0000 UTC m=+331.660367143" watchObservedRunningTime="2025-12-03 05:46:47.736392153 +0000 UTC m=+331.671852994" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.005795 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.041202 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk"] Dec 03 05:46:48 crc kubenswrapper[4810]: E1203 05:46:48.041848 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4a99527-0752-41a1-a601-eefe01a9c331" containerName="route-controller-manager" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.041973 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4a99527-0752-41a1-a601-eefe01a9c331" containerName="route-controller-manager" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.042181 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4a99527-0752-41a1-a601-eefe01a9c331" containerName="route-controller-manager" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.042755 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.045195 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk"] Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.048565 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.100801 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-config\") pod \"a4a99527-0752-41a1-a601-eefe01a9c331\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.100850 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-client-ca\") pod \"a4a99527-0752-41a1-a601-eefe01a9c331\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.100887 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tls5c\" (UniqueName: \"kubernetes.io/projected/a4a99527-0752-41a1-a601-eefe01a9c331-kube-api-access-tls5c\") pod \"a4a99527-0752-41a1-a601-eefe01a9c331\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.100964 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4a99527-0752-41a1-a601-eefe01a9c331-serving-cert\") pod \"a4a99527-0752-41a1-a601-eefe01a9c331\" (UID: \"a4a99527-0752-41a1-a601-eefe01a9c331\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.101884 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-config" (OuterVolumeSpecName: "config") pod "a4a99527-0752-41a1-a601-eefe01a9c331" (UID: "a4a99527-0752-41a1-a601-eefe01a9c331"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.102437 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-client-ca" (OuterVolumeSpecName: "client-ca") pod "a4a99527-0752-41a1-a601-eefe01a9c331" (UID: "a4a99527-0752-41a1-a601-eefe01a9c331"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.106878 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4a99527-0752-41a1-a601-eefe01a9c331-kube-api-access-tls5c" (OuterVolumeSpecName: "kube-api-access-tls5c") pod "a4a99527-0752-41a1-a601-eefe01a9c331" (UID: "a4a99527-0752-41a1-a601-eefe01a9c331"). InnerVolumeSpecName "kube-api-access-tls5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.106996 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4a99527-0752-41a1-a601-eefe01a9c331-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a4a99527-0752-41a1-a601-eefe01a9c331" (UID: "a4a99527-0752-41a1-a601-eefe01a9c331"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.201690 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmfjb\" (UniqueName: \"kubernetes.io/projected/f4538213-5bab-4f60-8b52-fc7f02145914-kube-api-access-jmfjb\") pod \"f4538213-5bab-4f60-8b52-fc7f02145914\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.201755 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-proxy-ca-bundles\") pod \"f4538213-5bab-4f60-8b52-fc7f02145914\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.201834 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4538213-5bab-4f60-8b52-fc7f02145914-serving-cert\") pod \"f4538213-5bab-4f60-8b52-fc7f02145914\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.201870 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-client-ca\") pod \"f4538213-5bab-4f60-8b52-fc7f02145914\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.201935 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-config\") pod \"f4538213-5bab-4f60-8b52-fc7f02145914\" (UID: \"f4538213-5bab-4f60-8b52-fc7f02145914\") " Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.202118 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21c3eaba-0f90-400f-9f62-d74941b9d1f4-config\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " 
pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.202172 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21c3eaba-0f90-400f-9f62-d74941b9d1f4-serving-cert\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.202204 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6vvp\" (UniqueName: \"kubernetes.io/projected/21c3eaba-0f90-400f-9f62-d74941b9d1f4-kube-api-access-v6vvp\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.202262 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/21c3eaba-0f90-400f-9f62-d74941b9d1f4-client-ca\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.202314 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.202329 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a4a99527-0752-41a1-a601-eefe01a9c331-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.202342 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tls5c\" (UniqueName: \"kubernetes.io/projected/a4a99527-0752-41a1-a601-eefe01a9c331-kube-api-access-tls5c\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.202356 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4a99527-0752-41a1-a601-eefe01a9c331-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.203086 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-client-ca" (OuterVolumeSpecName: "client-ca") pod "f4538213-5bab-4f60-8b52-fc7f02145914" (UID: "f4538213-5bab-4f60-8b52-fc7f02145914"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.203078 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f4538213-5bab-4f60-8b52-fc7f02145914" (UID: "f4538213-5bab-4f60-8b52-fc7f02145914"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.203108 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-config" (OuterVolumeSpecName: "config") pod "f4538213-5bab-4f60-8b52-fc7f02145914" (UID: "f4538213-5bab-4f60-8b52-fc7f02145914"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.206884 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4538213-5bab-4f60-8b52-fc7f02145914-kube-api-access-jmfjb" (OuterVolumeSpecName: "kube-api-access-jmfjb") pod "f4538213-5bab-4f60-8b52-fc7f02145914" (UID: "f4538213-5bab-4f60-8b52-fc7f02145914"). InnerVolumeSpecName "kube-api-access-jmfjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.216965 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4538213-5bab-4f60-8b52-fc7f02145914-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f4538213-5bab-4f60-8b52-fc7f02145914" (UID: "f4538213-5bab-4f60-8b52-fc7f02145914"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303328 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/21c3eaba-0f90-400f-9f62-d74941b9d1f4-client-ca\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303395 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21c3eaba-0f90-400f-9f62-d74941b9d1f4-config\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303426 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21c3eaba-0f90-400f-9f62-d74941b9d1f4-serving-cert\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303464 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6vvp\" (UniqueName: \"kubernetes.io/projected/21c3eaba-0f90-400f-9f62-d74941b9d1f4-kube-api-access-v6vvp\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303512 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmfjb\" (UniqueName: \"kubernetes.io/projected/f4538213-5bab-4f60-8b52-fc7f02145914-kube-api-access-jmfjb\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303524 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303534 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4538213-5bab-4f60-8b52-fc7f02145914-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303544 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.303553 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4538213-5bab-4f60-8b52-fc7f02145914-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.304510 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/21c3eaba-0f90-400f-9f62-d74941b9d1f4-client-ca\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.304587 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21c3eaba-0f90-400f-9f62-d74941b9d1f4-config\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.307150 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21c3eaba-0f90-400f-9f62-d74941b9d1f4-serving-cert\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.322490 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6vvp\" (UniqueName: \"kubernetes.io/projected/21c3eaba-0f90-400f-9f62-d74941b9d1f4-kube-api-access-v6vvp\") pod \"route-controller-manager-76dbb48c79-wq6tk\" (UID: \"21c3eaba-0f90-400f-9f62-d74941b9d1f4\") " pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.364332 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.684301 4810 generic.go:334] "Generic (PLEG): container finished" podID="a4a99527-0752-41a1-a601-eefe01a9c331" containerID="5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a" exitCode=0 Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.684370 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.684393 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" event={"ID":"a4a99527-0752-41a1-a601-eefe01a9c331","Type":"ContainerDied","Data":"5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a"} Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.684790 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l" event={"ID":"a4a99527-0752-41a1-a601-eefe01a9c331","Type":"ContainerDied","Data":"71094eab00b6dc233d34093b2a0a28bb98e15cd581f48c8d4fcc38d623c109d5"} Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.684816 4810 scope.go:117] "RemoveContainer" containerID="5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.686999 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4538213-5bab-4f60-8b52-fc7f02145914" containerID="7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699" exitCode=0 Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.687055 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" event={"ID":"f4538213-5bab-4f60-8b52-fc7f02145914","Type":"ContainerDied","Data":"7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699"} Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.687076 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" event={"ID":"f4538213-5bab-4f60-8b52-fc7f02145914","Type":"ContainerDied","Data":"1d6705dc4d9d598c95f5a2ef539f8e50643c5d2b187516c3316cc9256add0c54"} Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.687163 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-76b976544-dn7ph" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.708985 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-76b976544-dn7ph"] Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.713873 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-76b976544-dn7ph"] Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.717415 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l"] Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.717633 4810 scope.go:117] "RemoveContainer" containerID="5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a" Dec 03 05:46:48 crc kubenswrapper[4810]: E1203 05:46:48.718795 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a\": container with ID starting with 5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a not found: ID does not exist" containerID="5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.718875 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a"} err="failed to get container status \"5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a\": rpc error: code = NotFound desc = could not find container \"5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a\": container with ID starting with 5e1587abf00c27c90bccf5ac3ef087d86ef3a6b5ed5ba834ade4d83a991de54a not found: ID does not exist" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.718920 4810 scope.go:117] "RemoveContainer" containerID="7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.720617 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8d8c54746-k4d8l"] Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.733837 4810 scope.go:117] "RemoveContainer" containerID="7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699" Dec 03 05:46:48 crc kubenswrapper[4810]: E1203 05:46:48.734326 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699\": container with ID starting with 7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699 not found: ID does not exist" containerID="7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699" Dec 03 05:46:48 crc kubenswrapper[4810]: I1203 05:46:48.734370 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699"} err="failed to get container status \"7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699\": rpc error: code = NotFound desc = could not find container \"7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699\": container with ID starting with 7f62144287d379af07f751b1d318110ff098b0c5687315bef8bc3448d402b699 not found: ID does not exist" Dec 03 05:46:48 crc 
kubenswrapper[4810]: I1203 05:46:48.780827 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk"] Dec 03 05:46:48 crc kubenswrapper[4810]: W1203 05:46:48.788049 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21c3eaba_0f90_400f_9f62_d74941b9d1f4.slice/crio-41117fd9df2f6e37d72b7b3cb89d0b8a74fa2456115e98e77b179dd6fb138913 WatchSource:0}: Error finding container 41117fd9df2f6e37d72b7b3cb89d0b8a74fa2456115e98e77b179dd6fb138913: Status 404 returned error can't find the container with id 41117fd9df2f6e37d72b7b3cb89d0b8a74fa2456115e98e77b179dd6fb138913 Dec 03 05:46:49 crc kubenswrapper[4810]: I1203 05:46:49.695186 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" event={"ID":"21c3eaba-0f90-400f-9f62-d74941b9d1f4","Type":"ContainerStarted","Data":"edb22ffd1a626f9148f52d20599e5e7d10e8484e1fa1a83594beccab829f0ec1"} Dec 03 05:46:49 crc kubenswrapper[4810]: I1203 05:46:49.695460 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" event={"ID":"21c3eaba-0f90-400f-9f62-d74941b9d1f4","Type":"ContainerStarted","Data":"41117fd9df2f6e37d72b7b3cb89d0b8a74fa2456115e98e77b179dd6fb138913"} Dec 03 05:46:49 crc kubenswrapper[4810]: I1203 05:46:49.695474 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:49 crc kubenswrapper[4810]: I1203 05:46:49.701695 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" Dec 03 05:46:49 crc kubenswrapper[4810]: I1203 05:46:49.719647 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-76dbb48c79-wq6tk" podStartSLOduration=3.719616827 podStartE2EDuration="3.719616827s" podCreationTimestamp="2025-12-03 05:46:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:46:49.712106714 +0000 UTC m=+333.647567575" watchObservedRunningTime="2025-12-03 05:46:49.719616827 +0000 UTC m=+333.655077688" Dec 03 05:46:50 crc kubenswrapper[4810]: I1203 05:46:50.385415 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4a99527-0752-41a1-a601-eefe01a9c331" path="/var/lib/kubelet/pods/a4a99527-0752-41a1-a601-eefe01a9c331/volumes" Dec 03 05:46:50 crc kubenswrapper[4810]: I1203 05:46:50.386160 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4538213-5bab-4f60-8b52-fc7f02145914" path="/var/lib/kubelet/pods/f4538213-5bab-4f60-8b52-fc7f02145914/volumes" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.041293 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m"] Dec 03 05:46:51 crc kubenswrapper[4810]: E1203 05:46:51.041727 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4538213-5bab-4f60-8b52-fc7f02145914" containerName="controller-manager" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.041796 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4538213-5bab-4f60-8b52-fc7f02145914" 
containerName="controller-manager" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.042056 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4538213-5bab-4f60-8b52-fc7f02145914" containerName="controller-manager" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.042889 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.051184 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.051517 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.051720 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.053309 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.053454 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.055899 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.057409 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m"] Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.065628 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.139930 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e01ab02-2031-47d0-8bcd-f48d001bda42-serving-cert\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.140285 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-config\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.140348 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g8kj\" (UniqueName: \"kubernetes.io/projected/4e01ab02-2031-47d0-8bcd-f48d001bda42-kube-api-access-2g8kj\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.140384 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-client-ca\") pod 
\"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.140407 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-proxy-ca-bundles\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.244039 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-client-ca\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.242501 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-client-ca\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.244196 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-proxy-ca-bundles\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.244328 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e01ab02-2031-47d0-8bcd-f48d001bda42-serving-cert\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.244361 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-config\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.244420 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g8kj\" (UniqueName: \"kubernetes.io/projected/4e01ab02-2031-47d0-8bcd-f48d001bda42-kube-api-access-2g8kj\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.248084 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-config\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 
05:46:51.248706 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e01ab02-2031-47d0-8bcd-f48d001bda42-proxy-ca-bundles\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.254639 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e01ab02-2031-47d0-8bcd-f48d001bda42-serving-cert\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.266044 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g8kj\" (UniqueName: \"kubernetes.io/projected/4e01ab02-2031-47d0-8bcd-f48d001bda42-kube-api-access-2g8kj\") pod \"controller-manager-5c76f4b54f-bjz7m\" (UID: \"4e01ab02-2031-47d0-8bcd-f48d001bda42\") " pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.371850 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.572553 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m"] Dec 03 05:46:51 crc kubenswrapper[4810]: I1203 05:46:51.708507 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" event={"ID":"4e01ab02-2031-47d0-8bcd-f48d001bda42","Type":"ContainerStarted","Data":"2680cdd0bfa4f70599b1807a748b32aeade66d80f19379f12c95dcfa0e38d003"} Dec 03 05:46:52 crc kubenswrapper[4810]: I1203 05:46:52.715112 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" event={"ID":"4e01ab02-2031-47d0-8bcd-f48d001bda42","Type":"ContainerStarted","Data":"061cc6a18c49b774c851b5707d4b73da7a5bd798263cf795f7ee174665d6cdc1"} Dec 03 05:46:52 crc kubenswrapper[4810]: I1203 05:46:52.715455 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:52 crc kubenswrapper[4810]: I1203 05:46:52.720140 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" Dec 03 05:46:52 crc kubenswrapper[4810]: I1203 05:46:52.738851 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5c76f4b54f-bjz7m" podStartSLOduration=6.738827632 podStartE2EDuration="6.738827632s" podCreationTimestamp="2025-12-03 05:46:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:46:52.73614214 +0000 UTC m=+336.671603011" watchObservedRunningTime="2025-12-03 05:46:52.738827632 +0000 UTC m=+336.674288493" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.085798 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-55x88"] Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.092224 4810 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.109600 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-55x88"] Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.276627 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-bound-sa-token\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.276699 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.276721 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xkbb\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-kube-api-access-5xkbb\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.276763 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9768f09e-a793-4fb0-a202-afb2849db8af-trusted-ca\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.276793 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9768f09e-a793-4fb0-a202-afb2849db8af-registry-certificates\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.276813 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9768f09e-a793-4fb0-a202-afb2849db8af-ca-trust-extracted\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.276827 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-registry-tls\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.276851 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" 
(UniqueName: \"kubernetes.io/secret/9768f09e-a793-4fb0-a202-afb2849db8af-installation-pull-secrets\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.301260 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.377818 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9768f09e-a793-4fb0-a202-afb2849db8af-trusted-ca\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.377869 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9768f09e-a793-4fb0-a202-afb2849db8af-registry-certificates\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.377893 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9768f09e-a793-4fb0-a202-afb2849db8af-ca-trust-extracted\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.377909 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-registry-tls\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.377935 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9768f09e-a793-4fb0-a202-afb2849db8af-installation-pull-secrets\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.377975 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-bound-sa-token\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.378002 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xkbb\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-kube-api-access-5xkbb\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.379130 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9768f09e-a793-4fb0-a202-afb2849db8af-ca-trust-extracted\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.379249 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9768f09e-a793-4fb0-a202-afb2849db8af-trusted-ca\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.379393 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9768f09e-a793-4fb0-a202-afb2849db8af-registry-certificates\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.386021 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9768f09e-a793-4fb0-a202-afb2849db8af-installation-pull-secrets\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.387391 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-registry-tls\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.396682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xkbb\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-kube-api-access-5xkbb\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.397208 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9768f09e-a793-4fb0-a202-afb2849db8af-bound-sa-token\") pod \"image-registry-66df7c8f76-55x88\" (UID: \"9768f09e-a793-4fb0-a202-afb2849db8af\") " pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.416929 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:12 crc kubenswrapper[4810]: I1203 05:47:12.829260 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-55x88"] Dec 03 05:47:12 crc kubenswrapper[4810]: W1203 05:47:12.846645 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9768f09e_a793_4fb0_a202_afb2849db8af.slice/crio-39edc6dfe8ceda04f8348fd10f4677acff7d527658ec1ff7969070a73488df83 WatchSource:0}: Error finding container 39edc6dfe8ceda04f8348fd10f4677acff7d527658ec1ff7969070a73488df83: Status 404 returned error can't find the container with id 39edc6dfe8ceda04f8348fd10f4677acff7d527658ec1ff7969070a73488df83 Dec 03 05:47:13 crc kubenswrapper[4810]: I1203 05:47:13.843784 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-55x88" event={"ID":"9768f09e-a793-4fb0-a202-afb2849db8af","Type":"ContainerStarted","Data":"d5331b2b2bdb84cbc7380e309af5d9da8e450248f70199ce48b4c8a6c206a0f1"} Dec 03 05:47:13 crc kubenswrapper[4810]: I1203 05:47:13.844354 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-55x88" event={"ID":"9768f09e-a793-4fb0-a202-afb2849db8af","Type":"ContainerStarted","Data":"39edc6dfe8ceda04f8348fd10f4677acff7d527658ec1ff7969070a73488df83"} Dec 03 05:47:13 crc kubenswrapper[4810]: I1203 05:47:13.844481 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:13 crc kubenswrapper[4810]: I1203 05:47:13.868246 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-55x88" podStartSLOduration=1.8682114520000002 podStartE2EDuration="1.868211452s" podCreationTimestamp="2025-12-03 05:47:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:47:13.863809373 +0000 UTC m=+357.799270304" watchObservedRunningTime="2025-12-03 05:47:13.868211452 +0000 UTC m=+357.803672293" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.142306 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mlr8j"] Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.143222 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mlr8j" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerName="registry-server" containerID="cri-o://bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a" gracePeriod=30 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.153414 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m697x"] Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.153773 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-m697x" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerName="registry-server" containerID="cri-o://551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e" gracePeriod=30 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.160783 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-22pk2"] Dec 03 05:47:17 
crc kubenswrapper[4810]: I1203 05:47:17.161038 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" containerID="cri-o://65aaef5133d0e2599f4f6fa4de9a103d69088a5aa4cac9c6ec5cffed9de135fd" gracePeriod=30 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.168243 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxjsv"] Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.168540 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jxjsv" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerName="registry-server" containerID="cri-o://56e265aef7ef22743cf7486cc613e75777f03f37b27598954221780ddf2ebc71" gracePeriod=30 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.174234 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xwgs5"] Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.175070 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.180347 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kbbl7"] Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.180871 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kbbl7" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerName="registry-server" containerID="cri-o://6d0498ae4e05e0b29d278205ebbec637427299144fd204442428bf854e24376f" gracePeriod=30 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.192017 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xwgs5"] Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.248328 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1de26693-7bac-4062-8ed2-d7f84510be17-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.248629 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1de26693-7bac-4062-8ed2-d7f84510be17-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.248757 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrkx2\" (UniqueName: \"kubernetes.io/projected/1de26693-7bac-4062-8ed2-d7f84510be17-kube-api-access-mrkx2\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.355332 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1de26693-7bac-4062-8ed2-d7f84510be17-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.355419 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrkx2\" (UniqueName: \"kubernetes.io/projected/1de26693-7bac-4062-8ed2-d7f84510be17-kube-api-access-mrkx2\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.355476 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1de26693-7bac-4062-8ed2-d7f84510be17-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.359448 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1de26693-7bac-4062-8ed2-d7f84510be17-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.363578 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1de26693-7bac-4062-8ed2-d7f84510be17-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.373963 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrkx2\" (UniqueName: \"kubernetes.io/projected/1de26693-7bac-4062-8ed2-d7f84510be17-kube-api-access-mrkx2\") pod \"marketplace-operator-79b997595-xwgs5\" (UID: \"1de26693-7bac-4062-8ed2-d7f84510be17\") " pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.503878 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.699123 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.862477 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-catalog-content\") pod \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.862619 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f97lg\" (UniqueName: \"kubernetes.io/projected/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-kube-api-access-f97lg\") pod \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.862645 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-utilities\") pod \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\" (UID: \"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d\") " Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.863941 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-utilities" (OuterVolumeSpecName: "utilities") pod "3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" (UID: "3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.867182 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m697x" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.870526 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-kube-api-access-f97lg" (OuterVolumeSpecName: "kube-api-access-f97lg") pod "3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" (UID: "3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d"). InnerVolumeSpecName "kube-api-access-f97lg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.876746 4810 generic.go:334] "Generic (PLEG): container finished" podID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerID="6d0498ae4e05e0b29d278205ebbec637427299144fd204442428bf854e24376f" exitCode=0 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.876814 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbbl7" event={"ID":"9edee8b2-6d3b-43ae-bee4-28739f3865bc","Type":"ContainerDied","Data":"6d0498ae4e05e0b29d278205ebbec637427299144fd204442428bf854e24376f"} Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.881249 4810 generic.go:334] "Generic (PLEG): container finished" podID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerID="56e265aef7ef22743cf7486cc613e75777f03f37b27598954221780ddf2ebc71" exitCode=0 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.881345 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxjsv" event={"ID":"f0b0d549-514c-4f29-901d-de91ae9e5242","Type":"ContainerDied","Data":"56e265aef7ef22743cf7486cc613e75777f03f37b27598954221780ddf2ebc71"} Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.884412 4810 generic.go:334] "Generic (PLEG): container finished" podID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerID="bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a" exitCode=0 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.884461 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mlr8j" event={"ID":"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d","Type":"ContainerDied","Data":"bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a"} Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.884497 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mlr8j" event={"ID":"3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d","Type":"ContainerDied","Data":"ee6a5f5fd4ff3707e9a9c5c20fe8c4d6e9f836541b967967e50e41e35943bbdc"} Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.884514 4810 scope.go:117] "RemoveContainer" containerID="bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.884620 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mlr8j" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.887879 4810 generic.go:334] "Generic (PLEG): container finished" podID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerID="65aaef5133d0e2599f4f6fa4de9a103d69088a5aa4cac9c6ec5cffed9de135fd" exitCode=0 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.887984 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" event={"ID":"16fa1024-0d69-4a97-8b3f-172a6591c81a","Type":"ContainerDied","Data":"65aaef5133d0e2599f4f6fa4de9a103d69088a5aa4cac9c6ec5cffed9de135fd"} Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.891344 4810 generic.go:334] "Generic (PLEG): container finished" podID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerID="551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e" exitCode=0 Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.891563 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m697x" event={"ID":"4d0fa646-0a48-471d-9168-08716ab96d5e","Type":"ContainerDied","Data":"551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e"} Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.891653 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m697x" event={"ID":"4d0fa646-0a48-471d-9168-08716ab96d5e","Type":"ContainerDied","Data":"2d40d1a38b1725e020be29632ce2f64523058a4fd7842dc7531f49aa86315888"} Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.891800 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m697x" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.912633 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.912999 4810 scope.go:117] "RemoveContainer" containerID="305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.917488 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" (UID: "3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.941517 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.942484 4810 scope.go:117] "RemoveContainer" containerID="b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.956783 4810 scope.go:117] "RemoveContainer" containerID="bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a" Dec 03 05:47:17 crc kubenswrapper[4810]: E1203 05:47:17.957206 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a\": container with ID starting with bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a not found: ID does not exist" containerID="bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.957241 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a"} err="failed to get container status \"bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a\": rpc error: code = NotFound desc = could not find container \"bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a\": container with ID starting with bf40c7cb5b29e92c42b0d0d8a5d020bdbccfb2232ccb13310b00b7a9925f723a not found: ID does not exist" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.957264 4810 scope.go:117] "RemoveContainer" containerID="305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.957284 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:47:17 crc kubenswrapper[4810]: E1203 05:47:17.957516 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1\": container with ID starting with 305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1 not found: ID does not exist" containerID="305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.957533 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1"} err="failed to get container status \"305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1\": rpc error: code = NotFound desc = could not find container \"305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1\": container with ID starting with 305be6b46362568ef4954f82d3cc07e3f4d5e65c1b4c6a591f36e68613375bd1 not found: ID does not exist" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.957546 4810 scope.go:117] "RemoveContainer" containerID="b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869" Dec 03 05:47:17 crc kubenswrapper[4810]: E1203 05:47:17.957751 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869\": container with ID starting with b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869 not found: ID does not exist" containerID="b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.957768 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869"} err="failed to get container status \"b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869\": rpc error: code = NotFound desc = could not find container \"b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869\": container with ID starting with b23c90f3b259e4c60b90c9ec32d3675e92304aeb5f718b676bf568a88e27b869 not found: ID does not exist" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.957780 4810 scope.go:117] "RemoveContainer" containerID="ea14a6044b5073a9ce04946750bf9ae58f057206e2ddfb8f0b135d0f35a2a678" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.963539 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hkxh\" (UniqueName: \"kubernetes.io/projected/4d0fa646-0a48-471d-9168-08716ab96d5e-kube-api-access-9hkxh\") pod \"4d0fa646-0a48-471d-9168-08716ab96d5e\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.963574 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-utilities\") pod \"4d0fa646-0a48-471d-9168-08716ab96d5e\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.963678 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-catalog-content\") pod 
\"4d0fa646-0a48-471d-9168-08716ab96d5e\" (UID: \"4d0fa646-0a48-471d-9168-08716ab96d5e\") " Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.964044 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f97lg\" (UniqueName: \"kubernetes.io/projected/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-kube-api-access-f97lg\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.964062 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.964075 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.978848 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-utilities" (OuterVolumeSpecName: "utilities") pod "4d0fa646-0a48-471d-9168-08716ab96d5e" (UID: "4d0fa646-0a48-471d-9168-08716ab96d5e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.981325 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d0fa646-0a48-471d-9168-08716ab96d5e-kube-api-access-9hkxh" (OuterVolumeSpecName: "kube-api-access-9hkxh") pod "4d0fa646-0a48-471d-9168-08716ab96d5e" (UID: "4d0fa646-0a48-471d-9168-08716ab96d5e"). InnerVolumeSpecName "kube-api-access-9hkxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:47:17 crc kubenswrapper[4810]: I1203 05:47:17.982879 4810 scope.go:117] "RemoveContainer" containerID="551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.006772 4810 scope.go:117] "RemoveContainer" containerID="30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.023725 4810 scope.go:117] "RemoveContainer" containerID="7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.027933 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d0fa646-0a48-471d-9168-08716ab96d5e" (UID: "4d0fa646-0a48-471d-9168-08716ab96d5e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.038227 4810 scope.go:117] "RemoveContainer" containerID="551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e" Dec 03 05:47:18 crc kubenswrapper[4810]: E1203 05:47:18.041211 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e\": container with ID starting with 551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e not found: ID does not exist" containerID="551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.041261 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e"} err="failed to get container status \"551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e\": rpc error: code = NotFound desc = could not find container \"551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e\": container with ID starting with 551c7b6fef79b738aa09568f3d6a80180b7272456c029707138a385875a5851e not found: ID does not exist" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.041301 4810 scope.go:117] "RemoveContainer" containerID="30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132" Dec 03 05:47:18 crc kubenswrapper[4810]: E1203 05:47:18.041645 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132\": container with ID starting with 30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132 not found: ID does not exist" containerID="30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.041678 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132"} err="failed to get container status \"30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132\": rpc error: code = NotFound desc = could not find container \"30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132\": container with ID starting with 30cbadeae825b14e702f084d1f95b6df37b9f0c94c0389438a24a76a62073132 not found: ID does not exist" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.041700 4810 scope.go:117] "RemoveContainer" containerID="7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39" Dec 03 05:47:18 crc kubenswrapper[4810]: E1203 05:47:18.042087 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39\": container with ID starting with 7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39 not found: ID does not exist" containerID="7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.042114 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39"} err="failed to get container status \"7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39\": rpc error: code = NotFound desc = could not 
find container \"7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39\": container with ID starting with 7168618bbd83f9100de0d805570120da775bd920bce35ac0712169cc4ed99f39 not found: ID does not exist" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.064960 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-utilities\") pod \"f0b0d549-514c-4f29-901d-de91ae9e5242\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065101 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-catalog-content\") pod \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065132 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-utilities\") pod \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065179 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fgzx\" (UniqueName: \"kubernetes.io/projected/9edee8b2-6d3b-43ae-bee4-28739f3865bc-kube-api-access-9fgzx\") pod \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\" (UID: \"9edee8b2-6d3b-43ae-bee4-28739f3865bc\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065207 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-operator-metrics\") pod \"16fa1024-0d69-4a97-8b3f-172a6591c81a\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065231 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5zct\" (UniqueName: \"kubernetes.io/projected/16fa1024-0d69-4a97-8b3f-172a6591c81a-kube-api-access-h5zct\") pod \"16fa1024-0d69-4a97-8b3f-172a6591c81a\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065248 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72zg4\" (UniqueName: \"kubernetes.io/projected/f0b0d549-514c-4f29-901d-de91ae9e5242-kube-api-access-72zg4\") pod \"f0b0d549-514c-4f29-901d-de91ae9e5242\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065298 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-catalog-content\") pod \"f0b0d549-514c-4f29-901d-de91ae9e5242\" (UID: \"f0b0d549-514c-4f29-901d-de91ae9e5242\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065319 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-trusted-ca\") pod \"16fa1024-0d69-4a97-8b3f-172a6591c81a\" (UID: \"16fa1024-0d69-4a97-8b3f-172a6591c81a\") " Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065535 4810 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hkxh\" (UniqueName: \"kubernetes.io/projected/4d0fa646-0a48-471d-9168-08716ab96d5e-kube-api-access-9hkxh\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065556 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065566 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0fa646-0a48-471d-9168-08716ab96d5e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.065830 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-utilities" (OuterVolumeSpecName: "utilities") pod "f0b0d549-514c-4f29-901d-de91ae9e5242" (UID: "f0b0d549-514c-4f29-901d-de91ae9e5242"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.066212 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "16fa1024-0d69-4a97-8b3f-172a6591c81a" (UID: "16fa1024-0d69-4a97-8b3f-172a6591c81a"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.067143 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-utilities" (OuterVolumeSpecName: "utilities") pod "9edee8b2-6d3b-43ae-bee4-28739f3865bc" (UID: "9edee8b2-6d3b-43ae-bee4-28739f3865bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.075987 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0b0d549-514c-4f29-901d-de91ae9e5242-kube-api-access-72zg4" (OuterVolumeSpecName: "kube-api-access-72zg4") pod "f0b0d549-514c-4f29-901d-de91ae9e5242" (UID: "f0b0d549-514c-4f29-901d-de91ae9e5242"). InnerVolumeSpecName "kube-api-access-72zg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.076208 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "16fa1024-0d69-4a97-8b3f-172a6591c81a" (UID: "16fa1024-0d69-4a97-8b3f-172a6591c81a"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.076362 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16fa1024-0d69-4a97-8b3f-172a6591c81a-kube-api-access-h5zct" (OuterVolumeSpecName: "kube-api-access-h5zct") pod "16fa1024-0d69-4a97-8b3f-172a6591c81a" (UID: "16fa1024-0d69-4a97-8b3f-172a6591c81a"). InnerVolumeSpecName "kube-api-access-h5zct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.081464 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9edee8b2-6d3b-43ae-bee4-28739f3865bc-kube-api-access-9fgzx" (OuterVolumeSpecName: "kube-api-access-9fgzx") pod "9edee8b2-6d3b-43ae-bee4-28739f3865bc" (UID: "9edee8b2-6d3b-43ae-bee4-28739f3865bc"). InnerVolumeSpecName "kube-api-access-9fgzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.088331 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0b0d549-514c-4f29-901d-de91ae9e5242" (UID: "f0b0d549-514c-4f29-901d-de91ae9e5242"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.140400 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xwgs5"] Dec 03 05:47:18 crc kubenswrapper[4810]: W1203 05:47:18.146228 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1de26693_7bac_4062_8ed2_d7f84510be17.slice/crio-b159b068b8b998431c906c33bf1b55b43bd89c58e95e884861d24b6d74b5b633 WatchSource:0}: Error finding container b159b068b8b998431c906c33bf1b55b43bd89c58e95e884861d24b6d74b5b633: Status 404 returned error can't find the container with id b159b068b8b998431c906c33bf1b55b43bd89c58e95e884861d24b6d74b5b633 Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.167301 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.167382 4810 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.167395 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0b0d549-514c-4f29-901d-de91ae9e5242-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.167404 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.167413 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fgzx\" (UniqueName: \"kubernetes.io/projected/9edee8b2-6d3b-43ae-bee4-28739f3865bc-kube-api-access-9fgzx\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.167422 4810 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/16fa1024-0d69-4a97-8b3f-172a6591c81a-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.167431 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5zct\" (UniqueName: 
\"kubernetes.io/projected/16fa1024-0d69-4a97-8b3f-172a6591c81a-kube-api-access-h5zct\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.167442 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72zg4\" (UniqueName: \"kubernetes.io/projected/f0b0d549-514c-4f29-901d-de91ae9e5242-kube-api-access-72zg4\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.176850 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9edee8b2-6d3b-43ae-bee4-28739f3865bc" (UID: "9edee8b2-6d3b-43ae-bee4-28739f3865bc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.248524 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mlr8j"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.252184 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mlr8j"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.256640 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m697x"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.260683 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-m697x"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.268885 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9edee8b2-6d3b-43ae-bee4-28739f3865bc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.384179 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" path="/var/lib/kubelet/pods/3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d/volumes" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.384955 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" path="/var/lib/kubelet/pods/4d0fa646-0a48-471d-9168-08716ab96d5e/volumes" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.900292 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" event={"ID":"16fa1024-0d69-4a97-8b3f-172a6591c81a","Type":"ContainerDied","Data":"ed9a2cb814a3d5bcae1ecba385258b4f24ac1a0d0f601b99b065ee183a2da5df"} Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.900336 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-22pk2" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.900374 4810 scope.go:117] "RemoveContainer" containerID="65aaef5133d0e2599f4f6fa4de9a103d69088a5aa4cac9c6ec5cffed9de135fd" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.903750 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kbbl7" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.903720 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbbl7" event={"ID":"9edee8b2-6d3b-43ae-bee4-28739f3865bc","Type":"ContainerDied","Data":"586d9d6f86d370ef3de8841592ff70652f03c389d16003a7e4837046d047d9e7"} Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.905108 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" event={"ID":"1de26693-7bac-4062-8ed2-d7f84510be17","Type":"ContainerStarted","Data":"d44e6fa9f0b4f68b69447e5d63dcd4ddba63c2adc51a5216aac5f5ff7a4aa1bf"} Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.905153 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" event={"ID":"1de26693-7bac-4062-8ed2-d7f84510be17","Type":"ContainerStarted","Data":"b159b068b8b998431c906c33bf1b55b43bd89c58e95e884861d24b6d74b5b633"} Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.906187 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.909166 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.910221 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxjsv" event={"ID":"f0b0d549-514c-4f29-901d-de91ae9e5242","Type":"ContainerDied","Data":"fc6f30374d440c3a24afc8ecd6013d11827e7aa74d802f005942c2404411b063"} Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.910295 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxjsv" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.917955 4810 scope.go:117] "RemoveContainer" containerID="6d0498ae4e05e0b29d278205ebbec637427299144fd204442428bf854e24376f" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.935716 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxjsv"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.936940 4810 scope.go:117] "RemoveContainer" containerID="ea11098e09c5a870ccc0f0591dd7e3857cd8abe4f9788066d66b6aaee26f7cc2" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.940787 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxjsv"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.955289 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xwgs5" podStartSLOduration=1.955267859 podStartE2EDuration="1.955267859s" podCreationTimestamp="2025-12-03 05:47:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:47:18.952568846 +0000 UTC m=+362.888029687" watchObservedRunningTime="2025-12-03 05:47:18.955267859 +0000 UTC m=+362.890728700" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.974168 4810 scope.go:117] "RemoveContainer" containerID="faae3be713b1dc6678429d6c8057a9a379b18321181f49b7aa496bf8bb18aa7b" Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.978479 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kbbl7"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.983112 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kbbl7"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.992588 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-22pk2"] Dec 03 05:47:18 crc kubenswrapper[4810]: I1203 05:47:18.995610 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-22pk2"] Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.008067 4810 scope.go:117] "RemoveContainer" containerID="56e265aef7ef22743cf7486cc613e75777f03f37b27598954221780ddf2ebc71" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.025485 4810 scope.go:117] "RemoveContainer" containerID="fb0e9b9bad53390e35132c573093b8b9517088d318c69f2ce06d5180a4675ef5" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.040638 4810 scope.go:117] "RemoveContainer" containerID="c87e35e8f74b677a21deb5af1b419076e6d41fc3fe419976d721947d32499e42" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359034 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qbzdc"] Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359275 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerName="extract-utilities" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359292 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerName="extract-utilities" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359311 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" 
containerName="extract-utilities" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359318 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerName="extract-utilities" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359333 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerName="extract-utilities" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359341 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerName="extract-utilities" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359351 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359359 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359367 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerName="extract-content" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359375 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerName="extract-content" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359386 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerName="extract-content" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359393 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerName="extract-content" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359402 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerName="extract-content" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359409 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerName="extract-content" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359420 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359426 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359438 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359444 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359452 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359459 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359470 4810 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerName="extract-utilities" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359477 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerName="extract-utilities" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359486 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359493 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359504 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359511 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: E1203 05:47:19.359519 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerName="extract-content" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359526 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerName="extract-content" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359631 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359672 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359689 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" containerName="marketplace-operator" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359700 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359713 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d0fa646-0a48-471d-9168-08716ab96d5e" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.359726 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c72e00b-cee9-4cee-bdd7-9f65dbb60c8d" containerName="registry-server" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.360763 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.364430 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.367352 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbzdc"] Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.483959 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-catalog-content\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.484672 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh275\" (UniqueName: \"kubernetes.io/projected/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-kube-api-access-qh275\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.484843 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-utilities\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.557245 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tr27d"] Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.558249 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.566948 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tr27d"] Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.585940 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh275\" (UniqueName: \"kubernetes.io/projected/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-kube-api-access-qh275\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.586000 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-utilities\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.586050 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-catalog-content\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.586561 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-catalog-content\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.586611 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-utilities\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.608447 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.635533 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qh275\" (UniqueName: \"kubernetes.io/projected/dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7-kube-api-access-qh275\") pod \"redhat-marketplace-qbzdc\" (UID: \"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7\") " pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.680385 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.687492 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14e683a3-a7b9-4f89-a03d-bdda8882df24-catalog-content\") pod \"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.687550 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh5wp\" (UniqueName: \"kubernetes.io/projected/14e683a3-a7b9-4f89-a03d-bdda8882df24-kube-api-access-gh5wp\") pod \"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.687577 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14e683a3-a7b9-4f89-a03d-bdda8882df24-utilities\") pod \"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.788488 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14e683a3-a7b9-4f89-a03d-bdda8882df24-catalog-content\") pod \"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.788549 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh5wp\" (UniqueName: \"kubernetes.io/projected/14e683a3-a7b9-4f89-a03d-bdda8882df24-kube-api-access-gh5wp\") pod \"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.788571 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14e683a3-a7b9-4f89-a03d-bdda8882df24-utilities\") pod \"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.789111 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14e683a3-a7b9-4f89-a03d-bdda8882df24-catalog-content\") pod \"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.789142 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14e683a3-a7b9-4f89-a03d-bdda8882df24-utilities\") pod \"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.806920 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh5wp\" (UniqueName: \"kubernetes.io/projected/14e683a3-a7b9-4f89-a03d-bdda8882df24-kube-api-access-gh5wp\") pod 
\"certified-operators-tr27d\" (UID: \"14e683a3-a7b9-4f89-a03d-bdda8882df24\") " pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:19 crc kubenswrapper[4810]: I1203 05:47:19.924967 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.122550 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbzdc"] Dec 03 05:47:20 crc kubenswrapper[4810]: W1203 05:47:20.134501 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc2d13cd_9117_4326_a6b1_3bdbcf6b81f7.slice/crio-3fe08c00b0633e8801df9d6b5bdd1d7eba2b124284cabdcac39a512d69687c87 WatchSource:0}: Error finding container 3fe08c00b0633e8801df9d6b5bdd1d7eba2b124284cabdcac39a512d69687c87: Status 404 returned error can't find the container with id 3fe08c00b0633e8801df9d6b5bdd1d7eba2b124284cabdcac39a512d69687c87 Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.333125 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tr27d"] Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.383991 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16fa1024-0d69-4a97-8b3f-172a6591c81a" path="/var/lib/kubelet/pods/16fa1024-0d69-4a97-8b3f-172a6591c81a/volumes" Dec 03 05:47:20 crc kubenswrapper[4810]: E1203 05:47:20.384172 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc2d13cd_9117_4326_a6b1_3bdbcf6b81f7.slice/crio-453cf6d7bf022ac3fa019a1bcc2a7970eeb1ec69797fe2c67271dec94654d262.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc2d13cd_9117_4326_a6b1_3bdbcf6b81f7.slice/crio-conmon-453cf6d7bf022ac3fa019a1bcc2a7970eeb1ec69797fe2c67271dec94654d262.scope\": RecentStats: unable to find data in memory cache]" Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.384513 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9edee8b2-6d3b-43ae-bee4-28739f3865bc" path="/var/lib/kubelet/pods/9edee8b2-6d3b-43ae-bee4-28739f3865bc/volumes" Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.385084 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0b0d549-514c-4f29-901d-de91ae9e5242" path="/var/lib/kubelet/pods/f0b0d549-514c-4f29-901d-de91ae9e5242/volumes" Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.933847 4810 generic.go:334] "Generic (PLEG): container finished" podID="14e683a3-a7b9-4f89-a03d-bdda8882df24" containerID="bb3aae3c90391f343a3cf35f95fb967546c4a7100edeee0d1218618aeebf5733" exitCode=0 Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.933949 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tr27d" event={"ID":"14e683a3-a7b9-4f89-a03d-bdda8882df24","Type":"ContainerDied","Data":"bb3aae3c90391f343a3cf35f95fb967546c4a7100edeee0d1218618aeebf5733"} Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.934343 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tr27d" event={"ID":"14e683a3-a7b9-4f89-a03d-bdda8882df24","Type":"ContainerStarted","Data":"124821c3ed6f63b73ee0d89c0fec9eb24d924bd4c4d20dee2cc6ed693e508765"} Dec 03 05:47:20 crc 
kubenswrapper[4810]: I1203 05:47:20.935820 4810 generic.go:334] "Generic (PLEG): container finished" podID="dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7" containerID="453cf6d7bf022ac3fa019a1bcc2a7970eeb1ec69797fe2c67271dec94654d262" exitCode=0 Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.935906 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbzdc" event={"ID":"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7","Type":"ContainerDied","Data":"453cf6d7bf022ac3fa019a1bcc2a7970eeb1ec69797fe2c67271dec94654d262"} Dec 03 05:47:20 crc kubenswrapper[4810]: I1203 05:47:20.935931 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbzdc" event={"ID":"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7","Type":"ContainerStarted","Data":"3fe08c00b0633e8801df9d6b5bdd1d7eba2b124284cabdcac39a512d69687c87"} Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.769164 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q46cp"] Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.771209 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.774342 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.776485 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q46cp"] Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.920470 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-utilities\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.920552 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-catalog-content\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.920632 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pkpm\" (UniqueName: \"kubernetes.io/projected/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-kube-api-access-6pkpm\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.944827 4810 generic.go:334] "Generic (PLEG): container finished" podID="dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7" containerID="5f2766426272fefec435c06dc61bc44e28daf3a05d70417f9a602cbf74b25312" exitCode=0 Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.945409 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbzdc" event={"ID":"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7","Type":"ContainerDied","Data":"5f2766426272fefec435c06dc61bc44e28daf3a05d70417f9a602cbf74b25312"} Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.948824 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-tr27d" event={"ID":"14e683a3-a7b9-4f89-a03d-bdda8882df24","Type":"ContainerStarted","Data":"dbdd01fb4a9c0ef787766a977e81a2df262f5a9a1614b9b8fd2fa5402c0f8bfc"} Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.963373 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hmz68"] Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.967623 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.970161 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 05:47:21 crc kubenswrapper[4810]: I1203 05:47:21.976262 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hmz68"] Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.021648 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-catalog-content\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.021699 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pkpm\" (UniqueName: \"kubernetes.io/projected/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-kube-api-access-6pkpm\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.021772 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-utilities\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.022270 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-catalog-content\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.022302 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-utilities\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.041976 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pkpm\" (UniqueName: \"kubernetes.io/projected/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-kube-api-access-6pkpm\") pod \"community-operators-q46cp\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.086234 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.123574 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-catalog-content\") pod \"redhat-operators-hmz68\" (UID: \"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.123648 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-utilities\") pod \"redhat-operators-hmz68\" (UID: \"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.123892 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msmxm\" (UniqueName: \"kubernetes.io/projected/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-kube-api-access-msmxm\") pod \"redhat-operators-hmz68\" (UID: \"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.225907 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-utilities\") pod \"redhat-operators-hmz68\" (UID: \"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.225952 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-catalog-content\") pod \"redhat-operators-hmz68\" (UID: \"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.225995 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msmxm\" (UniqueName: \"kubernetes.io/projected/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-kube-api-access-msmxm\") pod \"redhat-operators-hmz68\" (UID: \"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.226718 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-catalog-content\") pod \"redhat-operators-hmz68\" (UID: \"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.226927 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-utilities\") pod \"redhat-operators-hmz68\" (UID: \"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.260565 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msmxm\" (UniqueName: \"kubernetes.io/projected/f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f-kube-api-access-msmxm\") pod \"redhat-operators-hmz68\" (UID: 
\"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f\") " pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.338280 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.489637 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q46cp"] Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.779485 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hmz68"] Dec 03 05:47:22 crc kubenswrapper[4810]: W1203 05:47:22.784914 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2171748_a3a1_4f3a_b0b4_b0e5ab8d588f.slice/crio-ec9de318983c2b82e3e255961fe0f5db6e940725a1d3fba1864d7fea7c402dbf WatchSource:0}: Error finding container ec9de318983c2b82e3e255961fe0f5db6e940725a1d3fba1864d7fea7c402dbf: Status 404 returned error can't find the container with id ec9de318983c2b82e3e255961fe0f5db6e940725a1d3fba1864d7fea7c402dbf Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.955856 4810 generic.go:334] "Generic (PLEG): container finished" podID="f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f" containerID="ee4564583a90424d111049397ffaaf3d8225eba31aa9e0040176fca4a591e2c2" exitCode=0 Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.955926 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmz68" event={"ID":"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f","Type":"ContainerDied","Data":"ee4564583a90424d111049397ffaaf3d8225eba31aa9e0040176fca4a591e2c2"} Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.955957 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmz68" event={"ID":"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f","Type":"ContainerStarted","Data":"ec9de318983c2b82e3e255961fe0f5db6e940725a1d3fba1864d7fea7c402dbf"} Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.959312 4810 generic.go:334] "Generic (PLEG): container finished" podID="14e683a3-a7b9-4f89-a03d-bdda8882df24" containerID="dbdd01fb4a9c0ef787766a977e81a2df262f5a9a1614b9b8fd2fa5402c0f8bfc" exitCode=0 Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.959374 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tr27d" event={"ID":"14e683a3-a7b9-4f89-a03d-bdda8882df24","Type":"ContainerDied","Data":"dbdd01fb4a9c0ef787766a977e81a2df262f5a9a1614b9b8fd2fa5402c0f8bfc"} Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.962235 4810 generic.go:334] "Generic (PLEG): container finished" podID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerID="ed2dd30de4f04eb3d9b1d63a2b21cf2c5f857ecccddfd0dc0f0fd2fdf056e93d" exitCode=0 Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.962299 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q46cp" event={"ID":"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7","Type":"ContainerDied","Data":"ed2dd30de4f04eb3d9b1d63a2b21cf2c5f857ecccddfd0dc0f0fd2fdf056e93d"} Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.962325 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q46cp" event={"ID":"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7","Type":"ContainerStarted","Data":"a79e6a46c6ba2f1a0f5e3f0d4701895e2d838e8e661d19b768b44ef86af89e1b"} Dec 03 
05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.966097 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbzdc" event={"ID":"dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7","Type":"ContainerStarted","Data":"f2c2ea4cf33352f2d35a2a1a6fb9d0602bc7da6ba90f3b2727643d4fa7a66af1"} Dec 03 05:47:22 crc kubenswrapper[4810]: I1203 05:47:22.998208 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qbzdc" podStartSLOduration=2.3950670929999998 podStartE2EDuration="3.998091482s" podCreationTimestamp="2025-12-03 05:47:19 +0000 UTC" firstStartedPulling="2025-12-03 05:47:20.938002009 +0000 UTC m=+364.873462850" lastFinishedPulling="2025-12-03 05:47:22.541026398 +0000 UTC m=+366.476487239" observedRunningTime="2025-12-03 05:47:22.997225718 +0000 UTC m=+366.932686559" watchObservedRunningTime="2025-12-03 05:47:22.998091482 +0000 UTC m=+366.933552323" Dec 03 05:47:23 crc kubenswrapper[4810]: I1203 05:47:23.973396 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tr27d" event={"ID":"14e683a3-a7b9-4f89-a03d-bdda8882df24","Type":"ContainerStarted","Data":"dad828f0b7052c32339a549e9ece1ce727dfcf48481bcd41b25ebebf441a398e"} Dec 03 05:47:23 crc kubenswrapper[4810]: I1203 05:47:23.977133 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q46cp" event={"ID":"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7","Type":"ContainerStarted","Data":"677b5bb197788eef5cd3b8b0def924df49180ee0eaa4d4cebbd377c531079944"} Dec 03 05:47:23 crc kubenswrapper[4810]: I1203 05:47:23.979332 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmz68" event={"ID":"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f","Type":"ContainerStarted","Data":"e597da8bac7544662a06994d3c87bfe54a26f8b8e1b259282bdbda8936282f53"} Dec 03 05:47:23 crc kubenswrapper[4810]: I1203 05:47:23.996853 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tr27d" podStartSLOduration=2.482582249 podStartE2EDuration="4.996837447s" podCreationTimestamp="2025-12-03 05:47:19 +0000 UTC" firstStartedPulling="2025-12-03 05:47:20.938192785 +0000 UTC m=+364.873653666" lastFinishedPulling="2025-12-03 05:47:23.452448023 +0000 UTC m=+367.387908864" observedRunningTime="2025-12-03 05:47:23.995653555 +0000 UTC m=+367.931114396" watchObservedRunningTime="2025-12-03 05:47:23.996837447 +0000 UTC m=+367.932298278" Dec 03 05:47:24 crc kubenswrapper[4810]: I1203 05:47:24.985092 4810 generic.go:334] "Generic (PLEG): container finished" podID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerID="677b5bb197788eef5cd3b8b0def924df49180ee0eaa4d4cebbd377c531079944" exitCode=0 Dec 03 05:47:24 crc kubenswrapper[4810]: I1203 05:47:24.985788 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q46cp" event={"ID":"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7","Type":"ContainerDied","Data":"677b5bb197788eef5cd3b8b0def924df49180ee0eaa4d4cebbd377c531079944"} Dec 03 05:47:24 crc kubenswrapper[4810]: I1203 05:47:24.989655 4810 generic.go:334] "Generic (PLEG): container finished" podID="f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f" containerID="e597da8bac7544662a06994d3c87bfe54a26f8b8e1b259282bdbda8936282f53" exitCode=0 Dec 03 05:47:24 crc kubenswrapper[4810]: I1203 05:47:24.989705 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmz68" 
event={"ID":"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f","Type":"ContainerDied","Data":"e597da8bac7544662a06994d3c87bfe54a26f8b8e1b259282bdbda8936282f53"} Dec 03 05:47:25 crc kubenswrapper[4810]: I1203 05:47:25.677144 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:47:25 crc kubenswrapper[4810]: I1203 05:47:25.677221 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:47:27 crc kubenswrapper[4810]: I1203 05:47:27.019331 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmz68" event={"ID":"f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f","Type":"ContainerStarted","Data":"dd1906d3f03294897b4f1e4f9ae017183a85dee6b4ae5c6cd3fccbbd0758efe9"} Dec 03 05:47:27 crc kubenswrapper[4810]: I1203 05:47:27.042134 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hmz68" podStartSLOduration=3.518563859 podStartE2EDuration="6.042108367s" podCreationTimestamp="2025-12-03 05:47:21 +0000 UTC" firstStartedPulling="2025-12-03 05:47:22.957365411 +0000 UTC m=+366.892826252" lastFinishedPulling="2025-12-03 05:47:25.480909919 +0000 UTC m=+369.416370760" observedRunningTime="2025-12-03 05:47:27.040530314 +0000 UTC m=+370.975991155" watchObservedRunningTime="2025-12-03 05:47:27.042108367 +0000 UTC m=+370.977569208" Dec 03 05:47:28 crc kubenswrapper[4810]: I1203 05:47:28.030888 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q46cp" event={"ID":"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7","Type":"ContainerStarted","Data":"6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde"} Dec 03 05:47:28 crc kubenswrapper[4810]: I1203 05:47:28.055423 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q46cp" podStartSLOduration=4.048309276 podStartE2EDuration="7.055400954s" podCreationTimestamp="2025-12-03 05:47:21 +0000 UTC" firstStartedPulling="2025-12-03 05:47:22.963459826 +0000 UTC m=+366.898920667" lastFinishedPulling="2025-12-03 05:47:25.970551504 +0000 UTC m=+369.906012345" observedRunningTime="2025-12-03 05:47:28.051468018 +0000 UTC m=+371.986928869" watchObservedRunningTime="2025-12-03 05:47:28.055400954 +0000 UTC m=+371.990861795" Dec 03 05:47:29 crc kubenswrapper[4810]: I1203 05:47:29.681165 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:29 crc kubenswrapper[4810]: I1203 05:47:29.681223 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:29 crc kubenswrapper[4810]: I1203 05:47:29.734426 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:29 crc kubenswrapper[4810]: I1203 05:47:29.925981 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tr27d" 
Dec 03 05:47:29 crc kubenswrapper[4810]: I1203 05:47:29.927081 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:29 crc kubenswrapper[4810]: I1203 05:47:29.967916 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:30 crc kubenswrapper[4810]: I1203 05:47:30.075031 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qbzdc" Dec 03 05:47:30 crc kubenswrapper[4810]: I1203 05:47:30.092593 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tr27d" Dec 03 05:47:32 crc kubenswrapper[4810]: I1203 05:47:32.087021 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:32 crc kubenswrapper[4810]: I1203 05:47:32.087337 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:32 crc kubenswrapper[4810]: I1203 05:47:32.136186 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:32 crc kubenswrapper[4810]: I1203 05:47:32.339967 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:32 crc kubenswrapper[4810]: I1203 05:47:32.340077 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:32 crc kubenswrapper[4810]: I1203 05:47:32.388482 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:32 crc kubenswrapper[4810]: I1203 05:47:32.422425 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-55x88" Dec 03 05:47:32 crc kubenswrapper[4810]: I1203 05:47:32.475309 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cqx4p"] Dec 03 05:47:33 crc kubenswrapper[4810]: I1203 05:47:33.096464 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:47:33 crc kubenswrapper[4810]: I1203 05:47:33.100014 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hmz68" Dec 03 05:47:55 crc kubenswrapper[4810]: I1203 05:47:55.677894 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:47:55 crc kubenswrapper[4810]: I1203 05:47:55.678767 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:47:57 crc kubenswrapper[4810]: I1203 05:47:57.517460 4810 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" podUID="73510d8a-e4fc-4187-bb00-e4d9435c8d33" containerName="registry" containerID="cri-o://8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5" gracePeriod=30 Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.025615 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.185026 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-bound-sa-token\") pod \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.185117 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/73510d8a-e4fc-4187-bb00-e4d9435c8d33-installation-pull-secrets\") pod \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.185152 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lvdb\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-kube-api-access-4lvdb\") pod \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.185181 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/73510d8a-e4fc-4187-bb00-e4d9435c8d33-ca-trust-extracted\") pod \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.185219 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-certificates\") pod \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.185266 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-trusted-ca\") pod \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.185287 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-tls\") pod \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.186867 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "73510d8a-e4fc-4187-bb00-e4d9435c8d33" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.186913 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\" (UID: \"73510d8a-e4fc-4187-bb00-e4d9435c8d33\") " Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.187189 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.187165 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "73510d8a-e4fc-4187-bb00-e4d9435c8d33" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.194046 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "73510d8a-e4fc-4187-bb00-e4d9435c8d33" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.194934 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "73510d8a-e4fc-4187-bb00-e4d9435c8d33" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.195214 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73510d8a-e4fc-4187-bb00-e4d9435c8d33-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "73510d8a-e4fc-4187-bb00-e4d9435c8d33" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.196259 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-kube-api-access-4lvdb" (OuterVolumeSpecName: "kube-api-access-4lvdb") pod "73510d8a-e4fc-4187-bb00-e4d9435c8d33" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33"). InnerVolumeSpecName "kube-api-access-4lvdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.202298 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73510d8a-e4fc-4187-bb00-e4d9435c8d33-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "73510d8a-e4fc-4187-bb00-e4d9435c8d33" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.203458 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "73510d8a-e4fc-4187-bb00-e4d9435c8d33" (UID: "73510d8a-e4fc-4187-bb00-e4d9435c8d33"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.210522 4810 generic.go:334] "Generic (PLEG): container finished" podID="73510d8a-e4fc-4187-bb00-e4d9435c8d33" containerID="8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5" exitCode=0 Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.210568 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" event={"ID":"73510d8a-e4fc-4187-bb00-e4d9435c8d33","Type":"ContainerDied","Data":"8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5"} Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.210600 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" event={"ID":"73510d8a-e4fc-4187-bb00-e4d9435c8d33","Type":"ContainerDied","Data":"0d8eb676d0329e319172f7ede50385f2ebf216d9ffd75b056a1a6794345d32e4"} Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.210599 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-cqx4p" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.210620 4810 scope.go:117] "RemoveContainer" containerID="8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.261430 4810 scope.go:117] "RemoveContainer" containerID="8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5" Dec 03 05:47:58 crc kubenswrapper[4810]: E1203 05:47:58.263284 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5\": container with ID starting with 8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5 not found: ID does not exist" containerID="8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.263486 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5"} err="failed to get container status \"8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5\": rpc error: code = NotFound desc = could not find container \"8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5\": container with ID starting with 8b7b49c4fc43f7939f1c2fa2870150d0dba64b87330024ea2a2f80dbdb1fa4d5 not found: ID does not exist" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.271805 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cqx4p"] Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.278981 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cqx4p"] Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.288404 4810 reconciler_common.go:293] "Volume detached for 
volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.288464 4810 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.288479 4810 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.288492 4810 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/73510d8a-e4fc-4187-bb00-e4d9435c8d33-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.288505 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lvdb\" (UniqueName: \"kubernetes.io/projected/73510d8a-e4fc-4187-bb00-e4d9435c8d33-kube-api-access-4lvdb\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.288517 4810 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/73510d8a-e4fc-4187-bb00-e4d9435c8d33-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 05:47:58 crc kubenswrapper[4810]: I1203 05:47:58.389857 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73510d8a-e4fc-4187-bb00-e4d9435c8d33" path="/var/lib/kubelet/pods/73510d8a-e4fc-4187-bb00-e4d9435c8d33/volumes" Dec 03 05:48:25 crc kubenswrapper[4810]: I1203 05:48:25.677102 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:48:25 crc kubenswrapper[4810]: I1203 05:48:25.678889 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:48:25 crc kubenswrapper[4810]: I1203 05:48:25.678974 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:48:25 crc kubenswrapper[4810]: I1203 05:48:25.679928 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bbc0200392c33056613f2d06cbccb7949a4ac2b9f3cdb5326da9ff914ce1c363"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 05:48:25 crc kubenswrapper[4810]: I1203 05:48:25.680089 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" 
containerID="cri-o://bbc0200392c33056613f2d06cbccb7949a4ac2b9f3cdb5326da9ff914ce1c363" gracePeriod=600 Dec 03 05:48:26 crc kubenswrapper[4810]: I1203 05:48:26.417653 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="bbc0200392c33056613f2d06cbccb7949a4ac2b9f3cdb5326da9ff914ce1c363" exitCode=0 Dec 03 05:48:26 crc kubenswrapper[4810]: I1203 05:48:26.417787 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"bbc0200392c33056613f2d06cbccb7949a4ac2b9f3cdb5326da9ff914ce1c363"} Dec 03 05:48:26 crc kubenswrapper[4810]: I1203 05:48:26.418126 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"79f862664f90d390c6e62915bb612e84b518e4e80e7a906eba37def23e22efe1"} Dec 03 05:48:26 crc kubenswrapper[4810]: I1203 05:48:26.418153 4810 scope.go:117] "RemoveContainer" containerID="8b5d149ae892c6cecab750b9bfc5bf5f9ea67490667ea0ec42bc63db12e01e97" Dec 03 05:50:25 crc kubenswrapper[4810]: I1203 05:50:25.678233 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:50:25 crc kubenswrapper[4810]: I1203 05:50:25.679964 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:50:55 crc kubenswrapper[4810]: I1203 05:50:55.677174 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:50:55 crc kubenswrapper[4810]: I1203 05:50:55.677768 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:51:25 crc kubenswrapper[4810]: I1203 05:51:25.677086 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:51:25 crc kubenswrapper[4810]: I1203 05:51:25.677701 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:51:25 crc kubenswrapper[4810]: I1203 05:51:25.677808 4810 kubelet.go:2542] "SyncLoop 
(probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:51:25 crc kubenswrapper[4810]: I1203 05:51:25.678409 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"79f862664f90d390c6e62915bb612e84b518e4e80e7a906eba37def23e22efe1"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 05:51:25 crc kubenswrapper[4810]: I1203 05:51:25.678463 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://79f862664f90d390c6e62915bb612e84b518e4e80e7a906eba37def23e22efe1" gracePeriod=600 Dec 03 05:51:26 crc kubenswrapper[4810]: I1203 05:51:26.757929 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="79f862664f90d390c6e62915bb612e84b518e4e80e7a906eba37def23e22efe1" exitCode=0 Dec 03 05:51:26 crc kubenswrapper[4810]: I1203 05:51:26.757970 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"79f862664f90d390c6e62915bb612e84b518e4e80e7a906eba37def23e22efe1"} Dec 03 05:51:26 crc kubenswrapper[4810]: I1203 05:51:26.758492 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"40837cb8664b98412109347434fe923106e68d7421229ce0ff512909a9e08061"} Dec 03 05:51:26 crc kubenswrapper[4810]: I1203 05:51:26.758512 4810 scope.go:117] "RemoveContainer" containerID="bbc0200392c33056613f2d06cbccb7949a4ac2b9f3cdb5326da9ff914ce1c363" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.556463 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-9d7bx"] Dec 03 05:52:10 crc kubenswrapper[4810]: E1203 05:52:10.557165 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73510d8a-e4fc-4187-bb00-e4d9435c8d33" containerName="registry" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.557178 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="73510d8a-e4fc-4187-bb00-e4d9435c8d33" containerName="registry" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.557293 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="73510d8a-e4fc-4187-bb00-e4d9435c8d33" containerName="registry" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.557778 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-9d7bx" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.560542 4810 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-4t776" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.560796 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.561665 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.568375 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-9d7bx"] Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.574110 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sb4vh"] Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.574710 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-sb4vh" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.577176 4810 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-mfnbw" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.593988 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sb4vh"] Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.603467 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-cg57k"] Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.604313 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.606339 4810 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-h88p5" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.610232 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-cg57k"] Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.667374 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqprl\" (UniqueName: \"kubernetes.io/projected/d895f771-5889-476b-9f46-1a2930561552-kube-api-access-cqprl\") pod \"cert-manager-cainjector-7f985d654d-9d7bx\" (UID: \"d895f771-5889-476b-9f46-1a2930561552\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-9d7bx" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.768573 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqprl\" (UniqueName: \"kubernetes.io/projected/d895f771-5889-476b-9f46-1a2930561552-kube-api-access-cqprl\") pod \"cert-manager-cainjector-7f985d654d-9d7bx\" (UID: \"d895f771-5889-476b-9f46-1a2930561552\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-9d7bx" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.768690 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpkcp\" (UniqueName: \"kubernetes.io/projected/30155c9d-effd-4dd4-8893-afcf98aa730d-kube-api-access-dpkcp\") pod \"cert-manager-5b446d88c5-sb4vh\" (UID: \"30155c9d-effd-4dd4-8893-afcf98aa730d\") " pod="cert-manager/cert-manager-5b446d88c5-sb4vh" Dec 03 05:52:10 crc 
kubenswrapper[4810]: I1203 05:52:10.768725 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w29g\" (UniqueName: \"kubernetes.io/projected/5bac17b1-b4d3-423f-8303-219d40d0c765-kube-api-access-2w29g\") pod \"cert-manager-webhook-5655c58dd6-cg57k\" (UID: \"5bac17b1-b4d3-423f-8303-219d40d0c765\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.793313 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqprl\" (UniqueName: \"kubernetes.io/projected/d895f771-5889-476b-9f46-1a2930561552-kube-api-access-cqprl\") pod \"cert-manager-cainjector-7f985d654d-9d7bx\" (UID: \"d895f771-5889-476b-9f46-1a2930561552\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-9d7bx" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.870278 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpkcp\" (UniqueName: \"kubernetes.io/projected/30155c9d-effd-4dd4-8893-afcf98aa730d-kube-api-access-dpkcp\") pod \"cert-manager-5b446d88c5-sb4vh\" (UID: \"30155c9d-effd-4dd4-8893-afcf98aa730d\") " pod="cert-manager/cert-manager-5b446d88c5-sb4vh" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.870347 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w29g\" (UniqueName: \"kubernetes.io/projected/5bac17b1-b4d3-423f-8303-219d40d0c765-kube-api-access-2w29g\") pod \"cert-manager-webhook-5655c58dd6-cg57k\" (UID: \"5bac17b1-b4d3-423f-8303-219d40d0c765\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.881992 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-9d7bx" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.889698 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2w29g\" (UniqueName: \"kubernetes.io/projected/5bac17b1-b4d3-423f-8303-219d40d0c765-kube-api-access-2w29g\") pod \"cert-manager-webhook-5655c58dd6-cg57k\" (UID: \"5bac17b1-b4d3-423f-8303-219d40d0c765\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.892420 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpkcp\" (UniqueName: \"kubernetes.io/projected/30155c9d-effd-4dd4-8893-afcf98aa730d-kube-api-access-dpkcp\") pod \"cert-manager-5b446d88c5-sb4vh\" (UID: \"30155c9d-effd-4dd4-8893-afcf98aa730d\") " pod="cert-manager/cert-manager-5b446d88c5-sb4vh" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.899720 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-sb4vh" Dec 03 05:52:10 crc kubenswrapper[4810]: I1203 05:52:10.919348 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" Dec 03 05:52:11 crc kubenswrapper[4810]: I1203 05:52:11.098806 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-9d7bx"] Dec 03 05:52:11 crc kubenswrapper[4810]: W1203 05:52:11.108217 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd895f771_5889_476b_9f46_1a2930561552.slice/crio-5e25eaf4d8be99f90ac4f671a42742d6317cc1371124791eb1dfc826ba1985d4 WatchSource:0}: Error finding container 5e25eaf4d8be99f90ac4f671a42742d6317cc1371124791eb1dfc826ba1985d4: Status 404 returned error can't find the container with id 5e25eaf4d8be99f90ac4f671a42742d6317cc1371124791eb1dfc826ba1985d4 Dec 03 05:52:11 crc kubenswrapper[4810]: I1203 05:52:11.114688 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 05:52:11 crc kubenswrapper[4810]: I1203 05:52:11.136040 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sb4vh"] Dec 03 05:52:11 crc kubenswrapper[4810]: I1203 05:52:11.163857 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-cg57k"] Dec 03 05:52:11 crc kubenswrapper[4810]: W1203 05:52:11.168538 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bac17b1_b4d3_423f_8303_219d40d0c765.slice/crio-cd905b4ff8e07bd07800a57e84213582160d3df839b0a3c184a623fc0f760037 WatchSource:0}: Error finding container cd905b4ff8e07bd07800a57e84213582160d3df839b0a3c184a623fc0f760037: Status 404 returned error can't find the container with id cd905b4ff8e07bd07800a57e84213582160d3df839b0a3c184a623fc0f760037 Dec 03 05:52:12 crc kubenswrapper[4810]: I1203 05:52:12.022926 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-9d7bx" event={"ID":"d895f771-5889-476b-9f46-1a2930561552","Type":"ContainerStarted","Data":"5e25eaf4d8be99f90ac4f671a42742d6317cc1371124791eb1dfc826ba1985d4"} Dec 03 05:52:12 crc kubenswrapper[4810]: I1203 05:52:12.023669 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" event={"ID":"5bac17b1-b4d3-423f-8303-219d40d0c765","Type":"ContainerStarted","Data":"cd905b4ff8e07bd07800a57e84213582160d3df839b0a3c184a623fc0f760037"} Dec 03 05:52:12 crc kubenswrapper[4810]: I1203 05:52:12.024782 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-sb4vh" event={"ID":"30155c9d-effd-4dd4-8893-afcf98aa730d","Type":"ContainerStarted","Data":"32161a570e6779454c9ba892481b38e36988f27432ccce894c4b4ebbc3a33e24"} Dec 03 05:52:16 crc kubenswrapper[4810]: I1203 05:52:16.068477 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-sb4vh" event={"ID":"30155c9d-effd-4dd4-8893-afcf98aa730d","Type":"ContainerStarted","Data":"a46ec0ba3bcb15b8390be3c182fee0b73a06df41c2b1cbe45d4049b067db6777"} Dec 03 05:52:16 crc kubenswrapper[4810]: I1203 05:52:16.070863 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" event={"ID":"5bac17b1-b4d3-423f-8303-219d40d0c765","Type":"ContainerStarted","Data":"7bbaa223e88004bac4278071b6e111ab466cce4bbec3e44dc1d5771d2a81a60b"} Dec 03 05:52:16 crc kubenswrapper[4810]: I1203 05:52:16.071320 4810 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" Dec 03 05:52:16 crc kubenswrapper[4810]: I1203 05:52:16.073425 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-9d7bx" event={"ID":"d895f771-5889-476b-9f46-1a2930561552","Type":"ContainerStarted","Data":"3de3e1fde814f3138e27296cb0f8a060e590f76a406d06498f420c65c337ec87"} Dec 03 05:52:16 crc kubenswrapper[4810]: I1203 05:52:16.094778 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-sb4vh" podStartSLOduration=2.278792928 podStartE2EDuration="6.09472168s" podCreationTimestamp="2025-12-03 05:52:10 +0000 UTC" firstStartedPulling="2025-12-03 05:52:11.143479563 +0000 UTC m=+655.078940404" lastFinishedPulling="2025-12-03 05:52:14.959408315 +0000 UTC m=+658.894869156" observedRunningTime="2025-12-03 05:52:16.085695559 +0000 UTC m=+660.021156400" watchObservedRunningTime="2025-12-03 05:52:16.09472168 +0000 UTC m=+660.030182521" Dec 03 05:52:16 crc kubenswrapper[4810]: I1203 05:52:16.109335 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-9d7bx" podStartSLOduration=2.316123951 podStartE2EDuration="6.109311857s" podCreationTimestamp="2025-12-03 05:52:10 +0000 UTC" firstStartedPulling="2025-12-03 05:52:11.114252808 +0000 UTC m=+655.049713649" lastFinishedPulling="2025-12-03 05:52:14.907440704 +0000 UTC m=+658.842901555" observedRunningTime="2025-12-03 05:52:16.104510419 +0000 UTC m=+660.039971260" watchObservedRunningTime="2025-12-03 05:52:16.109311857 +0000 UTC m=+660.044772698" Dec 03 05:52:16 crc kubenswrapper[4810]: I1203 05:52:16.124103 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" podStartSLOduration=2.343530862 podStartE2EDuration="6.124048707s" podCreationTimestamp="2025-12-03 05:52:10 +0000 UTC" firstStartedPulling="2025-12-03 05:52:11.170378782 +0000 UTC m=+655.105839623" lastFinishedPulling="2025-12-03 05:52:14.950896627 +0000 UTC m=+658.886357468" observedRunningTime="2025-12-03 05:52:16.118326287 +0000 UTC m=+660.053787128" watchObservedRunningTime="2025-12-03 05:52:16.124048707 +0000 UTC m=+660.059509538" Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.921564 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-cg57k" Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.975035 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-64tlm"] Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.975501 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovn-controller" containerID="cri-o://94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946" gracePeriod=30 Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.975559 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="nbdb" containerID="cri-o://2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1" gracePeriod=30 Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.975654 4810 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="northd" containerID="cri-o://442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf" gracePeriod=30 Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.975796 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="sbdb" containerID="cri-o://aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac" gracePeriod=30 Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.975848 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kube-rbac-proxy-node" containerID="cri-o://61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f" gracePeriod=30 Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.975789 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovn-acl-logging" containerID="cri-o://873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7" gracePeriod=30 Dec 03 05:52:20 crc kubenswrapper[4810]: I1203 05:52:20.975633 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5" gracePeriod=30 Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.019608 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" containerID="cri-o://32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" gracePeriod=30 Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.106411 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/2.log" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.106847 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/1.log" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.106886 4810 generic.go:334] "Generic (PLEG): container finished" podID="61ac6c2e-df95-49c5-a959-0e061e9c5909" containerID="75a1cbee2ae5b02f05cab9fd6b558ab3a7960ce58042a9d876cfb52c90f3e927" exitCode=2 Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.106938 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4279f" event={"ID":"61ac6c2e-df95-49c5-a959-0e061e9c5909","Type":"ContainerDied","Data":"75a1cbee2ae5b02f05cab9fd6b558ab3a7960ce58042a9d876cfb52c90f3e927"} Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.106986 4810 scope.go:117] "RemoveContainer" containerID="ed2368d56cd10ef831d599a71e3b66818088f6d0969a8c137f5b7ec2bfe85533" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.107588 4810 scope.go:117] "RemoveContainer" containerID="75a1cbee2ae5b02f05cab9fd6b558ab3a7960ce58042a9d876cfb52c90f3e927" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.107777 4810 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-4279f_openshift-multus(61ac6c2e-df95-49c5-a959-0e061e9c5909)\"" pod="openshift-multus/multus-4279f" podUID="61ac6c2e-df95-49c5-a959-0e061e9c5909" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.120555 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/3.log" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.124937 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovn-acl-logging/0.log" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.128543 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovn-controller/0.log" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.130014 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f" exitCode=0 Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.130045 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7" exitCode=143 Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.130091 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f"} Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.130130 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7"} Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.259388 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/3.log" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.261681 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovn-acl-logging/0.log" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.262153 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovn-controller/0.log" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.262613 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.308891 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m58g6"] Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309102 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309113 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309122 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovn-acl-logging" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309128 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovn-acl-logging" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309137 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="nbdb" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309144 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="nbdb" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309155 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309161 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309168 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kubecfg-setup" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309173 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kubecfg-setup" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309181 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309188 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309199 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="sbdb" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309207 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="sbdb" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309216 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="northd" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309222 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="northd" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309229 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" 
containerName="ovn-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309235 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovn-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309243 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309248 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309257 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kube-rbac-proxy-node" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309263 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kube-rbac-proxy-node" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309352 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="northd" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309362 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovn-acl-logging" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309370 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309377 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309384 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309390 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="sbdb" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309397 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309404 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="kube-rbac-proxy-node" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309411 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovn-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309420 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="nbdb" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309513 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309522 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: E1203 05:52:21.309529 4810 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309535 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309618 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.309812 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerName="ovnkube-controller" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.311236 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.413288 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-systemd\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.413347 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-ovn-kubernetes\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.413389 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-env-overrides\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.413425 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.413463 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-script-lib\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.413879 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414022 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414067 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-openvswitch\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414093 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-var-lib-cni-networks-ovn-kubernetes\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414093 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414144 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-systemd-units\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414200 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414214 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414231 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwp8k\" (UniqueName: \"kubernetes.io/projected/885c296a-449b-4977-b6d9-396bc84d3cfa-kube-api-access-pwp8k\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414272 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-netns\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414318 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-etc-openvswitch\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414366 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414404 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-log-socket\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414430 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-log-socket" (OuterVolumeSpecName: "log-socket") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414459 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414516 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-slash" (OuterVolumeSpecName: "host-slash") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414491 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-slash\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414966 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/885c296a-449b-4977-b6d9-396bc84d3cfa-ovn-node-metrics-cert\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.414988 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-bin\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415350 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415380 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-var-lib-openvswitch\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415410 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-netd\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415430 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-kubelet\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415458 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-config\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415466 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415482 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-node-log\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415493 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415510 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-ovn\") pod \"885c296a-449b-4977-b6d9-396bc84d3cfa\" (UID: \"885c296a-449b-4977-b6d9-396bc84d3cfa\") " Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415958 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-run-ovn-kubernetes\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415512 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415915 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415939 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-node-log" (OuterVolumeSpecName: "node-log") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.415959 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416001 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416061 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovn-node-metrics-cert\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416084 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovnkube-config\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416103 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-cni-bin\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416133 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-log-socket\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416147 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-var-lib-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416168 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-slash\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416184 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-env-overrides\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416206 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovnkube-script-lib\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416223 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416243 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-etc-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416266 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-systemd\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416282 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-ovn\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416296 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-run-netns\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416317 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-node-log\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416341 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8296k\" (UniqueName: \"kubernetes.io/projected/8eafafc1-1279-4e10-91a2-1b44158e99cf-kube-api-access-8296k\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416357 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-cni-netd\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416385 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-systemd-units\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416406 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-kubelet\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416442 4810 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416453 4810 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416463 4810 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416471 4810 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416479 4810 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416487 4810 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-log-socket\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416495 4810 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-slash\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416504 4810 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416512 4810 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416520 4810 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416528 4810 reconciler_common.go:293] 
"Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416538 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416548 4810 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-node-log\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416559 4810 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416569 4810 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416578 4810 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.416587 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/885c296a-449b-4977-b6d9-396bc84d3cfa-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.419661 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/885c296a-449b-4977-b6d9-396bc84d3cfa-kube-api-access-pwp8k" (OuterVolumeSpecName: "kube-api-access-pwp8k") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "kube-api-access-pwp8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.420347 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/885c296a-449b-4977-b6d9-396bc84d3cfa-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.427475 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "885c296a-449b-4977-b6d9-396bc84d3cfa" (UID: "885c296a-449b-4977-b6d9-396bc84d3cfa"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518105 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-ovn\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518361 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-run-netns\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518231 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-ovn\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518427 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-node-log\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518484 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-run-netns\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518512 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8296k\" (UniqueName: \"kubernetes.io/projected/8eafafc1-1279-4e10-91a2-1b44158e99cf-kube-api-access-8296k\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518539 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-cni-netd\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518591 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-systemd-units\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518616 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-kubelet\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518639 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-run-ovn-kubernetes\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518663 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518682 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovn-node-metrics-cert\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518700 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovnkube-config\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518717 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-cni-bin\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518771 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-log-socket\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518792 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-var-lib-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518848 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-env-overrides\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518868 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-slash\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518933 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovnkube-script-lib\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518953 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.518973 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-etc-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519005 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-systemd\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519077 4810 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/885c296a-449b-4977-b6d9-396bc84d3cfa-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519094 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwp8k\" (UniqueName: \"kubernetes.io/projected/885c296a-449b-4977-b6d9-396bc84d3cfa-kube-api-access-pwp8k\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519106 4810 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/885c296a-449b-4977-b6d9-396bc84d3cfa-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519135 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-systemd\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519200 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-log-socket\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519226 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-var-lib-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519381 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" 
(UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-cni-netd\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519414 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-run-ovn-kubernetes\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519566 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-node-log\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519603 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-cni-bin\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519589 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-kubelet\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519623 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-run-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519647 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-etc-openvswitch\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519666 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-slash\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.519585 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8eafafc1-1279-4e10-91a2-1b44158e99cf-systemd-units\") pod \"ovnkube-node-m58g6\" (UID: 
\"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.520533 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-env-overrides\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.520793 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovnkube-script-lib\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.521757 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovnkube-config\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.523109 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8eafafc1-1279-4e10-91a2-1b44158e99cf-ovn-node-metrics-cert\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.542028 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8296k\" (UniqueName: \"kubernetes.io/projected/8eafafc1-1279-4e10-91a2-1b44158e99cf-kube-api-access-8296k\") pod \"ovnkube-node-m58g6\" (UID: \"8eafafc1-1279-4e10-91a2-1b44158e99cf\") " pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:21 crc kubenswrapper[4810]: I1203 05:52:21.630013 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.136989 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovnkube-controller/3.log" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.143226 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovn-acl-logging/0.log" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.143774 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-64tlm_885c296a-449b-4977-b6d9-396bc84d3cfa/ovn-controller/0.log" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144430 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" exitCode=0 Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144470 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac" exitCode=0 Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144480 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1" exitCode=0 Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144490 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf" exitCode=0 Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144497 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5" exitCode=0 Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144503 4810 generic.go:334] "Generic (PLEG): container finished" podID="885c296a-449b-4977-b6d9-396bc84d3cfa" containerID="94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946" exitCode=143 Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144519 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144560 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144572 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144582 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" 
event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144592 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144602 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144614 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144530 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144619 4810 scope.go:117] "RemoveContainer" containerID="32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144629 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144753 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144766 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144772 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144777 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144782 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144788 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144793 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144810 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-64tlm" event={"ID":"885c296a-449b-4977-b6d9-396bc84d3cfa","Type":"ContainerDied","Data":"c274353c504459a46f005a04fdba617dcd302b6629b8821a19b4f8906b78f53d"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144829 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144835 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144840 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144845 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144851 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144856 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144861 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144867 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144876 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.144882 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.149702 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/2.log" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.151660 4810 generic.go:334] "Generic (PLEG): container finished" podID="8eafafc1-1279-4e10-91a2-1b44158e99cf" containerID="0d4e146cbb5ecbee106b0cccf46da7dc050d1723d6c312ac5be8b3fecf8b6b61" exitCode=0 Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.151708 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerDied","Data":"0d4e146cbb5ecbee106b0cccf46da7dc050d1723d6c312ac5be8b3fecf8b6b61"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.151766 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"f16241defdd1a4e99843faaf7b6972b1fcbbe1f8b4909a52e81bce6b00f5c2fe"} Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.167950 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.199128 4810 scope.go:117] "RemoveContainer" containerID="aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.200507 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-64tlm"] Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.204718 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-64tlm"] Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.227259 4810 scope.go:117] "RemoveContainer" containerID="2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.248615 4810 scope.go:117] "RemoveContainer" containerID="442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.267443 4810 scope.go:117] "RemoveContainer" containerID="7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.286487 4810 scope.go:117] "RemoveContainer" containerID="61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.300982 4810 scope.go:117] "RemoveContainer" containerID="873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.316184 4810 scope.go:117] "RemoveContainer" containerID="94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.331787 4810 scope.go:117] "RemoveContainer" containerID="5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.375007 4810 scope.go:117] "RemoveContainer" containerID="32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.379640 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": container with ID starting with 32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0 not found: ID does not exist" containerID="32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.379675 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0"} err="failed to get container status \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": rpc error: code = NotFound desc = could not find container \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": container with ID starting with 32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.379702 4810 scope.go:117] "RemoveContainer" 
containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.381713 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": container with ID starting with 2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6 not found: ID does not exist" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.381817 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6"} err="failed to get container status \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": rpc error: code = NotFound desc = could not find container \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": container with ID starting with 2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.381863 4810 scope.go:117] "RemoveContainer" containerID="aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.382698 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": container with ID starting with aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac not found: ID does not exist" containerID="aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.382740 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac"} err="failed to get container status \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": rpc error: code = NotFound desc = could not find container \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": container with ID starting with aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.382758 4810 scope.go:117] "RemoveContainer" containerID="2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.383262 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": container with ID starting with 2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1 not found: ID does not exist" containerID="2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.383289 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1"} err="failed to get container status \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": rpc error: code = NotFound desc = could not find container \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": container with ID starting with 
2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.383307 4810 scope.go:117] "RemoveContainer" containerID="442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.383880 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": container with ID starting with 442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf not found: ID does not exist" containerID="442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.383906 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf"} err="failed to get container status \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": rpc error: code = NotFound desc = could not find container \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": container with ID starting with 442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.383922 4810 scope.go:117] "RemoveContainer" containerID="7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.384928 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": container with ID starting with 7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5 not found: ID does not exist" containerID="7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.384973 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5"} err="failed to get container status \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": rpc error: code = NotFound desc = could not find container \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": container with ID starting with 7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.385015 4810 scope.go:117] "RemoveContainer" containerID="61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.385194 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="885c296a-449b-4977-b6d9-396bc84d3cfa" path="/var/lib/kubelet/pods/885c296a-449b-4977-b6d9-396bc84d3cfa/volumes" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.386925 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": container with ID starting with 61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f not found: ID does not exist" containerID="61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.386954 4810 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f"} err="failed to get container status \"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": rpc error: code = NotFound desc = could not find container \"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": container with ID starting with 61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.386971 4810 scope.go:117] "RemoveContainer" containerID="873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.387242 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": container with ID starting with 873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7 not found: ID does not exist" containerID="873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.387277 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7"} err="failed to get container status \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": rpc error: code = NotFound desc = could not find container \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": container with ID starting with 873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.387298 4810 scope.go:117] "RemoveContainer" containerID="94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.387605 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": container with ID starting with 94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946 not found: ID does not exist" containerID="94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.387634 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946"} err="failed to get container status \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": rpc error: code = NotFound desc = could not find container \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": container with ID starting with 94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.387654 4810 scope.go:117] "RemoveContainer" containerID="5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c" Dec 03 05:52:22 crc kubenswrapper[4810]: E1203 05:52:22.387920 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": container with ID starting with 5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c not found: ID does 
not exist" containerID="5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.387946 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c"} err="failed to get container status \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": rpc error: code = NotFound desc = could not find container \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": container with ID starting with 5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.387966 4810 scope.go:117] "RemoveContainer" containerID="32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.388213 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0"} err="failed to get container status \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": rpc error: code = NotFound desc = could not find container \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": container with ID starting with 32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.388238 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.388435 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6"} err="failed to get container status \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": rpc error: code = NotFound desc = could not find container \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": container with ID starting with 2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.388457 4810 scope.go:117] "RemoveContainer" containerID="aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.388653 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac"} err="failed to get container status \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": rpc error: code = NotFound desc = could not find container \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": container with ID starting with aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.388674 4810 scope.go:117] "RemoveContainer" containerID="2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.389450 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1"} err="failed to get container status \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": rpc error: code = NotFound desc = could 
not find container \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": container with ID starting with 2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.389480 4810 scope.go:117] "RemoveContainer" containerID="442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.389725 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf"} err="failed to get container status \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": rpc error: code = NotFound desc = could not find container \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": container with ID starting with 442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.389784 4810 scope.go:117] "RemoveContainer" containerID="7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.393910 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5"} err="failed to get container status \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": rpc error: code = NotFound desc = could not find container \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": container with ID starting with 7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.393941 4810 scope.go:117] "RemoveContainer" containerID="61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.394279 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f"} err="failed to get container status \"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": rpc error: code = NotFound desc = could not find container \"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": container with ID starting with 61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.394301 4810 scope.go:117] "RemoveContainer" containerID="873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.394569 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7"} err="failed to get container status \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": rpc error: code = NotFound desc = could not find container \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": container with ID starting with 873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.394609 4810 scope.go:117] "RemoveContainer" containerID="94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.394882 4810 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946"} err="failed to get container status \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": rpc error: code = NotFound desc = could not find container \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": container with ID starting with 94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.394906 4810 scope.go:117] "RemoveContainer" containerID="5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.395098 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c"} err="failed to get container status \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": rpc error: code = NotFound desc = could not find container \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": container with ID starting with 5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.395117 4810 scope.go:117] "RemoveContainer" containerID="32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.395458 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0"} err="failed to get container status \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": rpc error: code = NotFound desc = could not find container \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": container with ID starting with 32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.395503 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.395698 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6"} err="failed to get container status \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": rpc error: code = NotFound desc = could not find container \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": container with ID starting with 2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.395720 4810 scope.go:117] "RemoveContainer" containerID="aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.395939 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac"} err="failed to get container status \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": rpc error: code = NotFound desc = could not find container \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": container with ID starting with 
aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.395963 4810 scope.go:117] "RemoveContainer" containerID="2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.396184 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1"} err="failed to get container status \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": rpc error: code = NotFound desc = could not find container \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": container with ID starting with 2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.396208 4810 scope.go:117] "RemoveContainer" containerID="442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.396469 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf"} err="failed to get container status \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": rpc error: code = NotFound desc = could not find container \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": container with ID starting with 442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.396492 4810 scope.go:117] "RemoveContainer" containerID="7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.396747 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5"} err="failed to get container status \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": rpc error: code = NotFound desc = could not find container \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": container with ID starting with 7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.396767 4810 scope.go:117] "RemoveContainer" containerID="61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.396929 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f"} err="failed to get container status \"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": rpc error: code = NotFound desc = could not find container \"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": container with ID starting with 61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.396947 4810 scope.go:117] "RemoveContainer" containerID="873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.397108 4810 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7"} err="failed to get container status \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": rpc error: code = NotFound desc = could not find container \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": container with ID starting with 873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.397126 4810 scope.go:117] "RemoveContainer" containerID="94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.397318 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946"} err="failed to get container status \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": rpc error: code = NotFound desc = could not find container \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": container with ID starting with 94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.397349 4810 scope.go:117] "RemoveContainer" containerID="5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.397497 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c"} err="failed to get container status \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": rpc error: code = NotFound desc = could not find container \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": container with ID starting with 5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.397586 4810 scope.go:117] "RemoveContainer" containerID="32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.397992 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0"} err="failed to get container status \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": rpc error: code = NotFound desc = could not find container \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": container with ID starting with 32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398015 4810 scope.go:117] "RemoveContainer" containerID="2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398186 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6"} err="failed to get container status \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": rpc error: code = NotFound desc = could not find container \"2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6\": container with ID starting with 2b47957b8410da2a8c528383bb47cd70fbedb07504f4496ecf377fbb73a732f6 not found: ID does not exist" Dec 
03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398206 4810 scope.go:117] "RemoveContainer" containerID="aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398380 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac"} err="failed to get container status \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": rpc error: code = NotFound desc = could not find container \"aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac\": container with ID starting with aaf0c5b7247afecd23c4e40e7ab5faf24ef9b935e406e3a864e25505b1d4c7ac not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398398 4810 scope.go:117] "RemoveContainer" containerID="2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398563 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1"} err="failed to get container status \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": rpc error: code = NotFound desc = could not find container \"2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1\": container with ID starting with 2e46fabebca0f21843c2417c9433e806d83301e39d13265d89477ec72024fba1 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398580 4810 scope.go:117] "RemoveContainer" containerID="442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398847 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf"} err="failed to get container status \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": rpc error: code = NotFound desc = could not find container \"442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf\": container with ID starting with 442cc079fbbdcc518da009e4f0e9da61d2e3c1911c8102f898142926a21dcbbf not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.398865 4810 scope.go:117] "RemoveContainer" containerID="7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399243 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5"} err="failed to get container status \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": rpc error: code = NotFound desc = could not find container \"7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5\": container with ID starting with 7d1cec591e908ceff4141a052fc562c191fe3b5befd8f79ce539cc4f4d71fde5 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399262 4810 scope.go:117] "RemoveContainer" containerID="61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399481 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f"} err="failed to get container status 
\"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": rpc error: code = NotFound desc = could not find container \"61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f\": container with ID starting with 61bac425d21bce3fb4a4775c8813be4ef6b9c3c45826b45e0ba4fc28dcad037f not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399497 4810 scope.go:117] "RemoveContainer" containerID="873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399639 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7"} err="failed to get container status \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": rpc error: code = NotFound desc = could not find container \"873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7\": container with ID starting with 873dc5f77ca371426f55f8ed6d9bd4c7adcf0af2cda1b9bc504b0324a1a4a8a7 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399657 4810 scope.go:117] "RemoveContainer" containerID="94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399816 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946"} err="failed to get container status \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": rpc error: code = NotFound desc = could not find container \"94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946\": container with ID starting with 94cd9e4b6e83cf500bc14a2b689835f49912c96abcdea328f8f8fb41fe8da946 not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399834 4810 scope.go:117] "RemoveContainer" containerID="5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399976 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c"} err="failed to get container status \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": rpc error: code = NotFound desc = could not find container \"5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c\": container with ID starting with 5f5551587498f61597dcac6f9ce6ecaed1ef227573d52f87d9388d607ca4867c not found: ID does not exist" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.399989 4810 scope.go:117] "RemoveContainer" containerID="32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0" Dec 03 05:52:22 crc kubenswrapper[4810]: I1203 05:52:22.400169 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0"} err="failed to get container status \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": rpc error: code = NotFound desc = could not find container \"32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0\": container with ID starting with 32a04b040406622e63d1814578f660a5a4b0fab7c7803d28b42670e130a035a0 not found: ID does not exist" Dec 03 05:52:23 crc kubenswrapper[4810]: I1203 05:52:23.162366 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"58f12ed4db54c78aa51eebddadcb790f007652982d2a2575e1844cb3ab714994"} Dec 03 05:52:23 crc kubenswrapper[4810]: I1203 05:52:23.162758 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"3c9ec147ef60762fc804d6312a36e02980edd001ea57c829ee0b5ce118095792"} Dec 03 05:52:23 crc kubenswrapper[4810]: I1203 05:52:23.162782 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"1a2fb41b1be7184c53ca9683f87b76bec63d7f2b8f9d3ac2b63f9936b00a742d"} Dec 03 05:52:23 crc kubenswrapper[4810]: I1203 05:52:23.162792 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"4fede30816b87f17e686dab22d3ee61deb4fc9547294799924459a693340f26d"} Dec 03 05:52:23 crc kubenswrapper[4810]: I1203 05:52:23.162802 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"7e00a08b5235ec8cffa1439a292e85969c75063dd4671225a1cc3c1e53502b0a"} Dec 03 05:52:23 crc kubenswrapper[4810]: I1203 05:52:23.162812 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"245328f4251cfe414d8605274954fa86977810d064f96920dc642d8b187cf2d7"} Dec 03 05:52:25 crc kubenswrapper[4810]: I1203 05:52:25.177975 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"a6d8969d0b6762e86ef868afe4927d80a0c7a7c99275e6412c4d16ad8097febf"} Dec 03 05:52:28 crc kubenswrapper[4810]: I1203 05:52:28.206320 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" event={"ID":"8eafafc1-1279-4e10-91a2-1b44158e99cf","Type":"ContainerStarted","Data":"86390ca01c28877d63d018fa9ad2414b53f4183868dc41bda0126a04142abf33"} Dec 03 05:52:28 crc kubenswrapper[4810]: I1203 05:52:28.207171 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:28 crc kubenswrapper[4810]: I1203 05:52:28.207199 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:28 crc kubenswrapper[4810]: I1203 05:52:28.248472 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" podStartSLOduration=7.248453216 podStartE2EDuration="7.248453216s" podCreationTimestamp="2025-12-03 05:52:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:52:28.248357124 +0000 UTC m=+672.183817985" watchObservedRunningTime="2025-12-03 05:52:28.248453216 +0000 UTC m=+672.183914067" Dec 03 05:52:28 crc kubenswrapper[4810]: I1203 05:52:28.269008 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:29 crc kubenswrapper[4810]: I1203 05:52:29.211727 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:29 crc kubenswrapper[4810]: I1203 05:52:29.240039 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:34 crc kubenswrapper[4810]: I1203 05:52:34.377499 4810 scope.go:117] "RemoveContainer" containerID="75a1cbee2ae5b02f05cab9fd6b558ab3a7960ce58042a9d876cfb52c90f3e927" Dec 03 05:52:34 crc kubenswrapper[4810]: E1203 05:52:34.378074 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-4279f_openshift-multus(61ac6c2e-df95-49c5-a959-0e061e9c5909)\"" pod="openshift-multus/multus-4279f" podUID="61ac6c2e-df95-49c5-a959-0e061e9c5909" Dec 03 05:52:45 crc kubenswrapper[4810]: I1203 05:52:45.377946 4810 scope.go:117] "RemoveContainer" containerID="75a1cbee2ae5b02f05cab9fd6b558ab3a7960ce58042a9d876cfb52c90f3e927" Dec 03 05:52:46 crc kubenswrapper[4810]: I1203 05:52:46.311078 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4279f_61ac6c2e-df95-49c5-a959-0e061e9c5909/kube-multus/2.log" Dec 03 05:52:46 crc kubenswrapper[4810]: I1203 05:52:46.311980 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4279f" event={"ID":"61ac6c2e-df95-49c5-a959-0e061e9c5909","Type":"ContainerStarted","Data":"2733624b8d7454e92e491e2f4e169b8c713340d17141b1a0446754a6904faece"} Dec 03 05:52:51 crc kubenswrapper[4810]: I1203 05:52:51.649652 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-m58g6" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.747117 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz"] Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.748746 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.751433 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.769767 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz"] Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.850246 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf4rx\" (UniqueName: \"kubernetes.io/projected/f9912c00-ee4e-47fa-a724-f7518c8c61b6-kube-api-access-jf4rx\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.850348 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.850407 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.951389 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.951469 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf4rx\" (UniqueName: \"kubernetes.io/projected/f9912c00-ee4e-47fa-a724-f7518c8c61b6-kube-api-access-jf4rx\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.951509 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.952053 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.952179 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:56 crc kubenswrapper[4810]: I1203 05:52:56.977074 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf4rx\" (UniqueName: \"kubernetes.io/projected/f9912c00-ee4e-47fa-a724-f7518c8c61b6-kube-api-access-jf4rx\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:57 crc kubenswrapper[4810]: I1203 05:52:57.063326 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:52:57 crc kubenswrapper[4810]: I1203 05:52:57.243543 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz"] Dec 03 05:52:57 crc kubenswrapper[4810]: I1203 05:52:57.424308 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" event={"ID":"f9912c00-ee4e-47fa-a724-f7518c8c61b6","Type":"ContainerStarted","Data":"0d324c7b74bba49eb8abc384c3c8149d6bf50a8031d33030b9f72b235fb42abf"} Dec 03 05:52:58 crc kubenswrapper[4810]: I1203 05:52:58.433259 4810 generic.go:334] "Generic (PLEG): container finished" podID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerID="a498a27a809a9c2a8fbace26e1ac7082e9f78754a8ece637cbe9bf98311001f1" exitCode=0 Dec 03 05:52:58 crc kubenswrapper[4810]: I1203 05:52:58.433317 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" event={"ID":"f9912c00-ee4e-47fa-a724-f7518c8c61b6","Type":"ContainerDied","Data":"a498a27a809a9c2a8fbace26e1ac7082e9f78754a8ece637cbe9bf98311001f1"} Dec 03 05:53:00 crc kubenswrapper[4810]: I1203 05:53:00.446511 4810 generic.go:334] "Generic (PLEG): container finished" podID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerID="20581c9ba333a94b994b6ee28449d29fdcac6d1851b11db385122739d1230d03" exitCode=0 Dec 03 05:53:00 crc kubenswrapper[4810]: I1203 05:53:00.446624 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" event={"ID":"f9912c00-ee4e-47fa-a724-f7518c8c61b6","Type":"ContainerDied","Data":"20581c9ba333a94b994b6ee28449d29fdcac6d1851b11db385122739d1230d03"} Dec 03 05:53:01 crc kubenswrapper[4810]: I1203 05:53:01.459224 4810 generic.go:334] "Generic (PLEG): container finished" podID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerID="6095c86d920fdabec3815c39e3db34d1ce036a19f9c06ae5e67b491067bb7be3" exitCode=0 Dec 03 05:53:01 crc kubenswrapper[4810]: I1203 
05:53:01.459298 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" event={"ID":"f9912c00-ee4e-47fa-a724-f7518c8c61b6","Type":"ContainerDied","Data":"6095c86d920fdabec3815c39e3db34d1ce036a19f9c06ae5e67b491067bb7be3"} Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.746906 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.830864 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-bundle\") pod \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.830942 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-util\") pod \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.830978 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf4rx\" (UniqueName: \"kubernetes.io/projected/f9912c00-ee4e-47fa-a724-f7518c8c61b6-kube-api-access-jf4rx\") pod \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\" (UID: \"f9912c00-ee4e-47fa-a724-f7518c8c61b6\") " Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.832334 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-bundle" (OuterVolumeSpecName: "bundle") pod "f9912c00-ee4e-47fa-a724-f7518c8c61b6" (UID: "f9912c00-ee4e-47fa-a724-f7518c8c61b6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.836857 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9912c00-ee4e-47fa-a724-f7518c8c61b6-kube-api-access-jf4rx" (OuterVolumeSpecName: "kube-api-access-jf4rx") pod "f9912c00-ee4e-47fa-a724-f7518c8c61b6" (UID: "f9912c00-ee4e-47fa-a724-f7518c8c61b6"). InnerVolumeSpecName "kube-api-access-jf4rx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.855553 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-util" (OuterVolumeSpecName: "util") pod "f9912c00-ee4e-47fa-a724-f7518c8c61b6" (UID: "f9912c00-ee4e-47fa-a724-f7518c8c61b6"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.932424 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.932457 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9912c00-ee4e-47fa-a724-f7518c8c61b6-util\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:02 crc kubenswrapper[4810]: I1203 05:53:02.932467 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf4rx\" (UniqueName: \"kubernetes.io/projected/f9912c00-ee4e-47fa-a724-f7518c8c61b6-kube-api-access-jf4rx\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:03 crc kubenswrapper[4810]: I1203 05:53:03.474906 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" event={"ID":"f9912c00-ee4e-47fa-a724-f7518c8c61b6","Type":"ContainerDied","Data":"0d324c7b74bba49eb8abc384c3c8149d6bf50a8031d33030b9f72b235fb42abf"} Dec 03 05:53:03 crc kubenswrapper[4810]: I1203 05:53:03.475291 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d324c7b74bba49eb8abc384c3c8149d6bf50a8031d33030b9f72b235fb42abf" Dec 03 05:53:03 crc kubenswrapper[4810]: I1203 05:53:03.475013 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.601112 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c"] Dec 03 05:53:05 crc kubenswrapper[4810]: E1203 05:53:05.602537 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerName="extract" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.602579 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerName="extract" Dec 03 05:53:05 crc kubenswrapper[4810]: E1203 05:53:05.602608 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerName="pull" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.602620 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerName="pull" Dec 03 05:53:05 crc kubenswrapper[4810]: E1203 05:53:05.602666 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerName="util" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.602676 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerName="util" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.602996 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9912c00-ee4e-47fa-a724-f7518c8c61b6" containerName="extract" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.603793 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.612110 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.612285 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.612275 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-m79xn" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.625516 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c"] Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.766670 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr2gp\" (UniqueName: \"kubernetes.io/projected/0e12845f-64f7-48a4-8297-25f3eef40777-kube-api-access-kr2gp\") pod \"nmstate-operator-5b5b58f5c8-ppp7c\" (UID: \"0e12845f-64f7-48a4-8297-25f3eef40777\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.867879 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr2gp\" (UniqueName: \"kubernetes.io/projected/0e12845f-64f7-48a4-8297-25f3eef40777-kube-api-access-kr2gp\") pod \"nmstate-operator-5b5b58f5c8-ppp7c\" (UID: \"0e12845f-64f7-48a4-8297-25f3eef40777\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.894693 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr2gp\" (UniqueName: \"kubernetes.io/projected/0e12845f-64f7-48a4-8297-25f3eef40777-kube-api-access-kr2gp\") pod \"nmstate-operator-5b5b58f5c8-ppp7c\" (UID: \"0e12845f-64f7-48a4-8297-25f3eef40777\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c" Dec 03 05:53:05 crc kubenswrapper[4810]: I1203 05:53:05.930605 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c" Dec 03 05:53:06 crc kubenswrapper[4810]: I1203 05:53:06.108177 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c"] Dec 03 05:53:06 crc kubenswrapper[4810]: I1203 05:53:06.493667 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c" event={"ID":"0e12845f-64f7-48a4-8297-25f3eef40777","Type":"ContainerStarted","Data":"659feb808ae18bb10ba7d48b05e667ab17f618bd54cd248e9fc4ad318b48b66e"} Dec 03 05:53:08 crc kubenswrapper[4810]: I1203 05:53:08.505118 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c" event={"ID":"0e12845f-64f7-48a4-8297-25f3eef40777","Type":"ContainerStarted","Data":"6f4ffe0e4d7abea05e2f233e64a7772d35a4cc161d410ac5c2224be1340eb4a3"} Dec 03 05:53:08 crc kubenswrapper[4810]: I1203 05:53:08.522978 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ppp7c" podStartSLOduration=1.636875366 podStartE2EDuration="3.522953675s" podCreationTimestamp="2025-12-03 05:53:05 +0000 UTC" firstStartedPulling="2025-12-03 05:53:06.122214765 +0000 UTC m=+710.057675616" lastFinishedPulling="2025-12-03 05:53:08.008293084 +0000 UTC m=+711.943753925" observedRunningTime="2025-12-03 05:53:08.521378387 +0000 UTC m=+712.456839228" watchObservedRunningTime="2025-12-03 05:53:08.522953675 +0000 UTC m=+712.458414516" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.511198 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t"] Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.512143 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.514449 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-tfsk7" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.520959 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t"] Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.524018 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g"] Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.524700 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.527908 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.540496 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g"] Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.575456 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-wvjvm"] Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.578126 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.615739 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdv94\" (UniqueName: \"kubernetes.io/projected/430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b-kube-api-access-rdv94\") pod \"nmstate-metrics-7f946cbc9-ln57t\" (UID: \"430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.717009 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdv94\" (UniqueName: \"kubernetes.io/projected/430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b-kube-api-access-rdv94\") pod \"nmstate-metrics-7f946cbc9-ln57t\" (UID: \"430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.717074 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bdhj\" (UniqueName: \"kubernetes.io/projected/283887bd-09bf-4f88-81f4-efb5ade1b9de-kube-api-access-6bdhj\") pod \"nmstate-webhook-5f6d4c5ccb-sqp7g\" (UID: \"283887bd-09bf-4f88-81f4-efb5ade1b9de\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.717098 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-nmstate-lock\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.717124 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zbxp\" (UniqueName: \"kubernetes.io/projected/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-kube-api-access-5zbxp\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.717148 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/283887bd-09bf-4f88-81f4-efb5ade1b9de-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sqp7g\" (UID: \"283887bd-09bf-4f88-81f4-efb5ade1b9de\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.717166 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-ovs-socket\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.717182 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-dbus-socket\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.718074 4810 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj"] Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.718811 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.721686 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.721929 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.723364 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-bxjld" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.739981 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj"] Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.747933 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdv94\" (UniqueName: \"kubernetes.io/projected/430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b-kube-api-access-rdv94\") pod \"nmstate-metrics-7f946cbc9-ln57t\" (UID: \"430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.817989 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvgzf\" (UniqueName: \"kubernetes.io/projected/aeb379d5-f663-4858-a0b5-27614ecf83e9-kube-api-access-fvgzf\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818096 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bdhj\" (UniqueName: \"kubernetes.io/projected/283887bd-09bf-4f88-81f4-efb5ade1b9de-kube-api-access-6bdhj\") pod \"nmstate-webhook-5f6d4c5ccb-sqp7g\" (UID: \"283887bd-09bf-4f88-81f4-efb5ade1b9de\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818125 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-nmstate-lock\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818152 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zbxp\" (UniqueName: \"kubernetes.io/projected/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-kube-api-access-5zbxp\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818177 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/283887bd-09bf-4f88-81f4-efb5ade1b9de-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sqp7g\" (UID: \"283887bd-09bf-4f88-81f4-efb5ade1b9de\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818195 4810 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-ovs-socket\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818209 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-dbus-socket\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818237 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/aeb379d5-f663-4858-a0b5-27614ecf83e9-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818268 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/aeb379d5-f663-4858-a0b5-27614ecf83e9-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818675 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-ovs-socket\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.818728 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-nmstate-lock\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: E1203 05:53:09.818860 4810 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 03 05:53:09 crc kubenswrapper[4810]: E1203 05:53:09.819012 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/283887bd-09bf-4f88-81f4-efb5ade1b9de-tls-key-pair podName:283887bd-09bf-4f88-81f4-efb5ade1b9de nodeName:}" failed. No retries permitted until 2025-12-03 05:53:10.318987491 +0000 UTC m=+714.254448332 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/283887bd-09bf-4f88-81f4-efb5ade1b9de-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-sqp7g" (UID: "283887bd-09bf-4f88-81f4-efb5ade1b9de") : secret "openshift-nmstate-webhook" not found Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.819100 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-dbus-socket\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.840901 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zbxp\" (UniqueName: \"kubernetes.io/projected/a70a2c16-7c9a-45aa-b91d-7d6f44b821c6-kube-api-access-5zbxp\") pod \"nmstate-handler-wvjvm\" (UID: \"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6\") " pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.854332 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bdhj\" (UniqueName: \"kubernetes.io/projected/283887bd-09bf-4f88-81f4-efb5ade1b9de-kube-api-access-6bdhj\") pod \"nmstate-webhook-5f6d4c5ccb-sqp7g\" (UID: \"283887bd-09bf-4f88-81f4-efb5ade1b9de\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.856631 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.909126 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.919681 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/aeb379d5-f663-4858-a0b5-27614ecf83e9-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.919756 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvgzf\" (UniqueName: \"kubernetes.io/projected/aeb379d5-f663-4858-a0b5-27614ecf83e9-kube-api-access-fvgzf\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.919845 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/aeb379d5-f663-4858-a0b5-27614ecf83e9-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.922522 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/aeb379d5-f663-4858-a0b5-27614ecf83e9-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 
05:53:09.926154 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/aeb379d5-f663-4858-a0b5-27614ecf83e9-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.937490 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5ff446b87-wfb9h"] Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.938408 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.952041 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvgzf\" (UniqueName: \"kubernetes.io/projected/aeb379d5-f663-4858-a0b5-27614ecf83e9-kube-api-access-fvgzf\") pod \"nmstate-console-plugin-7fbb5f6569-6drbj\" (UID: \"aeb379d5-f663-4858-a0b5-27614ecf83e9\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:09 crc kubenswrapper[4810]: I1203 05:53:09.952722 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5ff446b87-wfb9h"] Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.035366 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.113400 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t"] Dec 03 05:53:10 crc kubenswrapper[4810]: W1203 05:53:10.115139 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod430bfc6b_2d80_4c02_8b7e_7e7eb7880b2b.slice/crio-7946ba7a916eb4e3ae0b393a2bd41ad132280e7470215bbc2ae3dd7f00bb9d94 WatchSource:0}: Error finding container 7946ba7a916eb4e3ae0b393a2bd41ad132280e7470215bbc2ae3dd7f00bb9d94: Status 404 returned error can't find the container with id 7946ba7a916eb4e3ae0b393a2bd41ad132280e7470215bbc2ae3dd7f00bb9d94 Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.122114 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4prs\" (UniqueName: \"kubernetes.io/projected/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-kube-api-access-q4prs\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.122146 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-serving-cert\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.122282 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-config\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.122315 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-service-ca\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.122331 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-oauth-config\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.122344 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-oauth-serving-cert\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.122374 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-trusted-ca-bundle\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.215193 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj"] Dec 03 05:53:10 crc kubenswrapper[4810]: W1203 05:53:10.221098 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaeb379d5_f663_4858_a0b5_27614ecf83e9.slice/crio-ef05be841df2d85206928dc0b9ba3b04c18fd203004d4b42d467e2d60fad6b05 WatchSource:0}: Error finding container ef05be841df2d85206928dc0b9ba3b04c18fd203004d4b42d467e2d60fad6b05: Status 404 returned error can't find the container with id ef05be841df2d85206928dc0b9ba3b04c18fd203004d4b42d467e2d60fad6b05 Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.223545 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-trusted-ca-bundle\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.223615 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4prs\" (UniqueName: \"kubernetes.io/projected/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-kube-api-access-q4prs\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.223636 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-serving-cert\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.223667 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-config\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.223696 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-service-ca\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.223711 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-oauth-config\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.223742 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-oauth-serving-cert\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.225143 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-oauth-serving-cert\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.226037 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-config\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.226260 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-service-ca\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.226984 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-trusted-ca-bundle\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.229488 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-serving-cert\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.231879 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-console-oauth-config\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.238472 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4prs\" (UniqueName: \"kubernetes.io/projected/1f33905a-61c1-4f91-889f-a4a8ea74fb5b-kube-api-access-q4prs\") pod \"console-5ff446b87-wfb9h\" (UID: \"1f33905a-61c1-4f91-889f-a4a8ea74fb5b\") " pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.270273 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.325527 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/283887bd-09bf-4f88-81f4-efb5ade1b9de-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sqp7g\" (UID: \"283887bd-09bf-4f88-81f4-efb5ade1b9de\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.330535 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/283887bd-09bf-4f88-81f4-efb5ade1b9de-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sqp7g\" (UID: \"283887bd-09bf-4f88-81f4-efb5ade1b9de\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.464485 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.466977 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5ff446b87-wfb9h"] Dec 03 05:53:10 crc kubenswrapper[4810]: W1203 05:53:10.476101 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f33905a_61c1_4f91_889f_a4a8ea74fb5b.slice/crio-bda8fff13c600fdaab08a2fed87223a97b31352d1270a60fc3a34b4a1db5e0ca WatchSource:0}: Error finding container bda8fff13c600fdaab08a2fed87223a97b31352d1270a60fc3a34b4a1db5e0ca: Status 404 returned error can't find the container with id bda8fff13c600fdaab08a2fed87223a97b31352d1270a60fc3a34b4a1db5e0ca Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.520011 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5ff446b87-wfb9h" event={"ID":"1f33905a-61c1-4f91-889f-a4a8ea74fb5b","Type":"ContainerStarted","Data":"bda8fff13c600fdaab08a2fed87223a97b31352d1270a60fc3a34b4a1db5e0ca"} Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.520791 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" event={"ID":"aeb379d5-f663-4858-a0b5-27614ecf83e9","Type":"ContainerStarted","Data":"ef05be841df2d85206928dc0b9ba3b04c18fd203004d4b42d467e2d60fad6b05"} Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.522385 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" event={"ID":"430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b","Type":"ContainerStarted","Data":"7946ba7a916eb4e3ae0b393a2bd41ad132280e7470215bbc2ae3dd7f00bb9d94"} Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 
05:53:10.523024 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wvjvm" event={"ID":"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6","Type":"ContainerStarted","Data":"0ab3ca72486a2bc0ae3a3836d8a4548485cc9bfc0c3a5c93051f4d853e61d868"} Dec 03 05:53:10 crc kubenswrapper[4810]: I1203 05:53:10.657105 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g"] Dec 03 05:53:11 crc kubenswrapper[4810]: I1203 05:53:11.532391 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" event={"ID":"283887bd-09bf-4f88-81f4-efb5ade1b9de","Type":"ContainerStarted","Data":"abf69f0049553564073c5e493c6a35f37c9f49e6425dd8e95bfaad8adadee488"} Dec 03 05:53:11 crc kubenswrapper[4810]: I1203 05:53:11.535038 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5ff446b87-wfb9h" event={"ID":"1f33905a-61c1-4f91-889f-a4a8ea74fb5b","Type":"ContainerStarted","Data":"ac4c6eac7d65a97cca49b9c93e6a1039ca8e3557c0f707c8d02865f1f2e655cc"} Dec 03 05:53:11 crc kubenswrapper[4810]: I1203 05:53:11.557650 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5ff446b87-wfb9h" podStartSLOduration=2.5576297820000002 podStartE2EDuration="2.557629782s" podCreationTimestamp="2025-12-03 05:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:53:11.553499754 +0000 UTC m=+715.488960595" watchObservedRunningTime="2025-12-03 05:53:11.557629782 +0000 UTC m=+715.493090623" Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.555894 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" event={"ID":"283887bd-09bf-4f88-81f4-efb5ade1b9de","Type":"ContainerStarted","Data":"9691d137609d6ee91c4f754483c0eece6b65224f760fe1eb1b1539cb376dc98e"} Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.556471 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.559650 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wvjvm" event={"ID":"a70a2c16-7c9a-45aa-b91d-7d6f44b821c6","Type":"ContainerStarted","Data":"a55b0cdceac85bfe6e5a8df86fdbdbed0f17c95476fedc41794264e2ebcab769"} Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.559776 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.562444 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" event={"ID":"aeb379d5-f663-4858-a0b5-27614ecf83e9","Type":"ContainerStarted","Data":"c7dad12ad65e88f333aa7a379288fa83337f3a58e88f6f2bc1689ac2dd6c4f28"} Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.563458 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" event={"ID":"430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b","Type":"ContainerStarted","Data":"b1ffd1d8df81d45c9b242e19b154940260bc20774f730cced77b196e573c0d93"} Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.578093 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" 
podStartSLOduration=2.419337161 podStartE2EDuration="4.578071391s" podCreationTimestamp="2025-12-03 05:53:09 +0000 UTC" firstStartedPulling="2025-12-03 05:53:10.668009878 +0000 UTC m=+714.603470729" lastFinishedPulling="2025-12-03 05:53:12.826744118 +0000 UTC m=+716.762204959" observedRunningTime="2025-12-03 05:53:13.577293283 +0000 UTC m=+717.512754144" watchObservedRunningTime="2025-12-03 05:53:13.578071391 +0000 UTC m=+717.513532232" Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.597354 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-wvjvm" podStartSLOduration=1.735907047 podStartE2EDuration="4.597300835s" podCreationTimestamp="2025-12-03 05:53:09 +0000 UTC" firstStartedPulling="2025-12-03 05:53:09.97549641 +0000 UTC m=+713.910957251" lastFinishedPulling="2025-12-03 05:53:12.836890198 +0000 UTC m=+716.772351039" observedRunningTime="2025-12-03 05:53:13.594235853 +0000 UTC m=+717.529696694" watchObservedRunningTime="2025-12-03 05:53:13.597300835 +0000 UTC m=+717.532761676" Dec 03 05:53:13 crc kubenswrapper[4810]: I1203 05:53:13.614157 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-6drbj" podStartSLOduration=2.025795628 podStartE2EDuration="4.614130362s" podCreationTimestamp="2025-12-03 05:53:09 +0000 UTC" firstStartedPulling="2025-12-03 05:53:10.230777874 +0000 UTC m=+714.166238715" lastFinishedPulling="2025-12-03 05:53:12.819112598 +0000 UTC m=+716.754573449" observedRunningTime="2025-12-03 05:53:13.609066763 +0000 UTC m=+717.544527604" watchObservedRunningTime="2025-12-03 05:53:13.614130362 +0000 UTC m=+717.549591203" Dec 03 05:53:16 crc kubenswrapper[4810]: I1203 05:53:16.580908 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" event={"ID":"430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b","Type":"ContainerStarted","Data":"003ecbc53de5085f5a7e9c2062797871d6e59a9ac10139bab4642ac9e50c0072"} Dec 03 05:53:16 crc kubenswrapper[4810]: I1203 05:53:16.601750 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ln57t" podStartSLOduration=2.018673352 podStartE2EDuration="7.601698994s" podCreationTimestamp="2025-12-03 05:53:09 +0000 UTC" firstStartedPulling="2025-12-03 05:53:10.117488594 +0000 UTC m=+714.052949435" lastFinishedPulling="2025-12-03 05:53:15.700514226 +0000 UTC m=+719.635975077" observedRunningTime="2025-12-03 05:53:16.595677722 +0000 UTC m=+720.531138573" watchObservedRunningTime="2025-12-03 05:53:16.601698994 +0000 UTC m=+720.537159845" Dec 03 05:53:19 crc kubenswrapper[4810]: I1203 05:53:19.931489 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-wvjvm" Dec 03 05:53:20 crc kubenswrapper[4810]: I1203 05:53:20.271172 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:20 crc kubenswrapper[4810]: I1203 05:53:20.271594 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:20 crc kubenswrapper[4810]: I1203 05:53:20.277328 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:20 crc kubenswrapper[4810]: I1203 05:53:20.614306 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/console-5ff446b87-wfb9h" Dec 03 05:53:20 crc kubenswrapper[4810]: I1203 05:53:20.666415 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-fjrzc"] Dec 03 05:53:25 crc kubenswrapper[4810]: I1203 05:53:25.677856 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:53:25 crc kubenswrapper[4810]: I1203 05:53:25.678314 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:53:30 crc kubenswrapper[4810]: I1203 05:53:30.470828 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sqp7g" Dec 03 05:53:45 crc kubenswrapper[4810]: I1203 05:53:45.716758 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-fjrzc" podUID="d773e027-f95d-450b-bacc-f30b1235784c" containerName="console" containerID="cri-o://b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05" gracePeriod=15 Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.303877 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t"] Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.305714 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.321432 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.325872 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t"] Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.352776 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.352979 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v258t\" (UniqueName: \"kubernetes.io/projected/fc6d4c38-ea04-420d-b505-92115a81cb40-kube-api-access-v258t\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.353156 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.410707 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-fjrzc_d773e027-f95d-450b-bacc-f30b1235784c/console/0.log" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.410802 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.453764 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-trusted-ca-bundle\") pod \"d773e027-f95d-450b-bacc-f30b1235784c\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.453842 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-service-ca\") pod \"d773e027-f95d-450b-bacc-f30b1235784c\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.453876 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgdmp\" (UniqueName: \"kubernetes.io/projected/d773e027-f95d-450b-bacc-f30b1235784c-kube-api-access-vgdmp\") pod \"d773e027-f95d-450b-bacc-f30b1235784c\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.453933 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-oauth-serving-cert\") pod \"d773e027-f95d-450b-bacc-f30b1235784c\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.453952 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-console-config\") pod \"d773e027-f95d-450b-bacc-f30b1235784c\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.453968 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-serving-cert\") pod \"d773e027-f95d-450b-bacc-f30b1235784c\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.453991 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-oauth-config\") pod \"d773e027-f95d-450b-bacc-f30b1235784c\" (UID: \"d773e027-f95d-450b-bacc-f30b1235784c\") " Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.454102 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.454149 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v258t\" (UniqueName: \"kubernetes.io/projected/fc6d4c38-ea04-420d-b505-92115a81cb40-kube-api-access-v258t\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc 
kubenswrapper[4810]: I1203 05:53:46.454186 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.454617 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "d773e027-f95d-450b-bacc-f30b1235784c" (UID: "d773e027-f95d-450b-bacc-f30b1235784c"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.454649 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-service-ca" (OuterVolumeSpecName: "service-ca") pod "d773e027-f95d-450b-bacc-f30b1235784c" (UID: "d773e027-f95d-450b-bacc-f30b1235784c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.454685 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.454719 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "d773e027-f95d-450b-bacc-f30b1235784c" (UID: "d773e027-f95d-450b-bacc-f30b1235784c"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.455146 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.455134 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-console-config" (OuterVolumeSpecName: "console-config") pod "d773e027-f95d-450b-bacc-f30b1235784c" (UID: "d773e027-f95d-450b-bacc-f30b1235784c"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.460420 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d773e027-f95d-450b-bacc-f30b1235784c-kube-api-access-vgdmp" (OuterVolumeSpecName: "kube-api-access-vgdmp") pod "d773e027-f95d-450b-bacc-f30b1235784c" (UID: "d773e027-f95d-450b-bacc-f30b1235784c"). InnerVolumeSpecName "kube-api-access-vgdmp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.460451 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "d773e027-f95d-450b-bacc-f30b1235784c" (UID: "d773e027-f95d-450b-bacc-f30b1235784c"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.469345 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "d773e027-f95d-450b-bacc-f30b1235784c" (UID: "d773e027-f95d-450b-bacc-f30b1235784c"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.470230 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v258t\" (UniqueName: \"kubernetes.io/projected/fc6d4c38-ea04-420d-b505-92115a81cb40-kube-api-access-v258t\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.555225 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.555270 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgdmp\" (UniqueName: \"kubernetes.io/projected/d773e027-f95d-450b-bacc-f30b1235784c-kube-api-access-vgdmp\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.555286 4810 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.555299 4810 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.555310 4810 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.555321 4810 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d773e027-f95d-450b-bacc-f30b1235784c-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.555332 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d773e027-f95d-450b-bacc-f30b1235784c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.638931 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.791268 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-fjrzc_d773e027-f95d-450b-bacc-f30b1235784c/console/0.log" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.791647 4810 generic.go:334] "Generic (PLEG): container finished" podID="d773e027-f95d-450b-bacc-f30b1235784c" containerID="b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05" exitCode=2 Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.791693 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fjrzc" event={"ID":"d773e027-f95d-450b-bacc-f30b1235784c","Type":"ContainerDied","Data":"b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05"} Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.791761 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-fjrzc" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.791799 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fjrzc" event={"ID":"d773e027-f95d-450b-bacc-f30b1235784c","Type":"ContainerDied","Data":"274e808343343184b3732b92c4ec03ba4d5149c477cdbf39026d1b136ed832db"} Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.791823 4810 scope.go:117] "RemoveContainer" containerID="b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.822074 4810 scope.go:117] "RemoveContainer" containerID="b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05" Dec 03 05:53:46 crc kubenswrapper[4810]: E1203 05:53:46.822705 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05\": container with ID starting with b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05 not found: ID does not exist" containerID="b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.822794 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05"} err="failed to get container status \"b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05\": rpc error: code = NotFound desc = could not find container \"b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05\": container with ID starting with b397687c0b6bff35700d81d4943a98753290815f109475add84b54770d3b5d05 not found: ID does not exist" Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.824333 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-fjrzc"] Dec 03 05:53:46 crc kubenswrapper[4810]: I1203 05:53:46.830665 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-fjrzc"] Dec 03 05:53:47 crc kubenswrapper[4810]: I1203 05:53:47.073565 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t"] Dec 03 05:53:47 crc kubenswrapper[4810]: I1203 05:53:47.800553 4810 generic.go:334] "Generic (PLEG): container finished" podID="fc6d4c38-ea04-420d-b505-92115a81cb40" 
containerID="63cf951e46ff7614899d8dfd7bdd6d9c7c46cc2def6e50277614e3452714a894" exitCode=0 Dec 03 05:53:47 crc kubenswrapper[4810]: I1203 05:53:47.800606 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" event={"ID":"fc6d4c38-ea04-420d-b505-92115a81cb40","Type":"ContainerDied","Data":"63cf951e46ff7614899d8dfd7bdd6d9c7c46cc2def6e50277614e3452714a894"} Dec 03 05:53:47 crc kubenswrapper[4810]: I1203 05:53:47.800928 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" event={"ID":"fc6d4c38-ea04-420d-b505-92115a81cb40","Type":"ContainerStarted","Data":"de04bf1614c07d160692895a3a62c72cf4c6232bc385dcc57fca3bbc417b2cd1"} Dec 03 05:53:48 crc kubenswrapper[4810]: I1203 05:53:48.385508 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d773e027-f95d-450b-bacc-f30b1235784c" path="/var/lib/kubelet/pods/d773e027-f95d-450b-bacc-f30b1235784c/volumes" Dec 03 05:53:51 crc kubenswrapper[4810]: I1203 05:53:51.829539 4810 generic.go:334] "Generic (PLEG): container finished" podID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerID="31c5c5272d157125733fde919d48a50f91090f24ab8fc69da2104c7154ac5f22" exitCode=0 Dec 03 05:53:51 crc kubenswrapper[4810]: I1203 05:53:51.829706 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" event={"ID":"fc6d4c38-ea04-420d-b505-92115a81cb40","Type":"ContainerDied","Data":"31c5c5272d157125733fde919d48a50f91090f24ab8fc69da2104c7154ac5f22"} Dec 03 05:53:52 crc kubenswrapper[4810]: I1203 05:53:52.838902 4810 generic.go:334] "Generic (PLEG): container finished" podID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerID="af2cecc3161c44517285cca48ea478eff7a21179b6e2af56972804857b9a7b58" exitCode=0 Dec 03 05:53:52 crc kubenswrapper[4810]: I1203 05:53:52.838981 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" event={"ID":"fc6d4c38-ea04-420d-b505-92115a81cb40","Type":"ContainerDied","Data":"af2cecc3161c44517285cca48ea478eff7a21179b6e2af56972804857b9a7b58"} Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.064186 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.260176 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-bundle\") pod \"fc6d4c38-ea04-420d-b505-92115a81cb40\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.260226 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-util\") pod \"fc6d4c38-ea04-420d-b505-92115a81cb40\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.260351 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v258t\" (UniqueName: \"kubernetes.io/projected/fc6d4c38-ea04-420d-b505-92115a81cb40-kube-api-access-v258t\") pod \"fc6d4c38-ea04-420d-b505-92115a81cb40\" (UID: \"fc6d4c38-ea04-420d-b505-92115a81cb40\") " Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.262002 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-bundle" (OuterVolumeSpecName: "bundle") pod "fc6d4c38-ea04-420d-b505-92115a81cb40" (UID: "fc6d4c38-ea04-420d-b505-92115a81cb40"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.264927 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v25sw"] Dec 03 05:53:54 crc kubenswrapper[4810]: E1203 05:53:54.265182 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerName="util" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.265194 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerName="util" Dec 03 05:53:54 crc kubenswrapper[4810]: E1203 05:53:54.265236 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerName="pull" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.265243 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerName="pull" Dec 03 05:53:54 crc kubenswrapper[4810]: E1203 05:53:54.265252 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d773e027-f95d-450b-bacc-f30b1235784c" containerName="console" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.265259 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d773e027-f95d-450b-bacc-f30b1235784c" containerName="console" Dec 03 05:53:54 crc kubenswrapper[4810]: E1203 05:53:54.265266 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerName="extract" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.265272 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerName="extract" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.265376 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc6d4c38-ea04-420d-b505-92115a81cb40" containerName="extract" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.265640 4810 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="d773e027-f95d-450b-bacc-f30b1235784c" containerName="console" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.266513 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.266895 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc6d4c38-ea04-420d-b505-92115a81cb40-kube-api-access-v258t" (OuterVolumeSpecName: "kube-api-access-v258t") pod "fc6d4c38-ea04-420d-b505-92115a81cb40" (UID: "fc6d4c38-ea04-420d-b505-92115a81cb40"). InnerVolumeSpecName "kube-api-access-v258t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.271415 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-util" (OuterVolumeSpecName: "util") pod "fc6d4c38-ea04-420d-b505-92115a81cb40" (UID: "fc6d4c38-ea04-420d-b505-92115a81cb40"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.274244 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v25sw"] Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.363529 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v258t\" (UniqueName: \"kubernetes.io/projected/fc6d4c38-ea04-420d-b505-92115a81cb40-kube-api-access-v258t\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.363677 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-util\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.363856 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc6d4c38-ea04-420d-b505-92115a81cb40-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.465469 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-utilities\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.465533 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-catalog-content\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.465708 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vt9r\" (UniqueName: \"kubernetes.io/projected/9e856a4f-1c7e-4897-a853-bc80a9b9a659-kube-api-access-4vt9r\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.566605 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vt9r\" (UniqueName: 
\"kubernetes.io/projected/9e856a4f-1c7e-4897-a853-bc80a9b9a659-kube-api-access-4vt9r\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.567185 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-utilities\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.567367 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-catalog-content\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.567795 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-utilities\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.567841 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-catalog-content\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.583846 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vt9r\" (UniqueName: \"kubernetes.io/projected/9e856a4f-1c7e-4897-a853-bc80a9b9a659-kube-api-access-4vt9r\") pod \"redhat-operators-v25sw\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.617827 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.852200 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" event={"ID":"fc6d4c38-ea04-420d-b505-92115a81cb40","Type":"ContainerDied","Data":"de04bf1614c07d160692895a3a62c72cf4c6232bc385dcc57fca3bbc417b2cd1"} Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.852254 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de04bf1614c07d160692895a3a62c72cf4c6232bc385dcc57fca3bbc417b2cd1" Dec 03 05:53:54 crc kubenswrapper[4810]: I1203 05:53:54.852278 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t" Dec 03 05:53:55 crc kubenswrapper[4810]: I1203 05:53:55.071658 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v25sw"] Dec 03 05:53:55 crc kubenswrapper[4810]: I1203 05:53:55.677816 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:53:55 crc kubenswrapper[4810]: I1203 05:53:55.678274 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:53:55 crc kubenswrapper[4810]: I1203 05:53:55.860205 4810 generic.go:334] "Generic (PLEG): container finished" podID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerID="1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea" exitCode=0 Dec 03 05:53:55 crc kubenswrapper[4810]: I1203 05:53:55.860253 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v25sw" event={"ID":"9e856a4f-1c7e-4897-a853-bc80a9b9a659","Type":"ContainerDied","Data":"1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea"} Dec 03 05:53:55 crc kubenswrapper[4810]: I1203 05:53:55.860284 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v25sw" event={"ID":"9e856a4f-1c7e-4897-a853-bc80a9b9a659","Type":"ContainerStarted","Data":"0c83b6b0011b123a0ae70c06463cb9553565c413000c6c425d091ddf3f28b332"} Dec 03 05:53:56 crc kubenswrapper[4810]: I1203 05:53:56.867102 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v25sw" event={"ID":"9e856a4f-1c7e-4897-a853-bc80a9b9a659","Type":"ContainerStarted","Data":"d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f"} Dec 03 05:53:58 crc kubenswrapper[4810]: I1203 05:53:58.265420 4810 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 05:53:58 crc kubenswrapper[4810]: I1203 05:53:58.878006 4810 generic.go:334] "Generic (PLEG): container finished" podID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerID="d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f" exitCode=0 Dec 03 05:53:58 crc kubenswrapper[4810]: I1203 05:53:58.878061 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v25sw" event={"ID":"9e856a4f-1c7e-4897-a853-bc80a9b9a659","Type":"ContainerDied","Data":"d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f"} Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.894899 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n"] Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.896719 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.899294 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v25sw" event={"ID":"9e856a4f-1c7e-4897-a853-bc80a9b9a659","Type":"ContainerStarted","Data":"21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3"} Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.901008 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.901273 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.901372 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-qt75g" Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.911669 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.911669 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 03 05:54:01 crc kubenswrapper[4810]: I1203 05:54:01.919959 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n"] Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.063485 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw2wj\" (UniqueName: \"kubernetes.io/projected/43572253-c54e-4ba2-b805-7a9521a015cd-kube-api-access-pw2wj\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.063597 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/43572253-c54e-4ba2-b805-7a9521a015cd-webhook-cert\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.063659 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/43572253-c54e-4ba2-b805-7a9521a015cd-apiservice-cert\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.165257 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/43572253-c54e-4ba2-b805-7a9521a015cd-webhook-cert\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.165563 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/43572253-c54e-4ba2-b805-7a9521a015cd-apiservice-cert\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.165603 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw2wj\" (UniqueName: \"kubernetes.io/projected/43572253-c54e-4ba2-b805-7a9521a015cd-kube-api-access-pw2wj\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.175565 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/43572253-c54e-4ba2-b805-7a9521a015cd-apiservice-cert\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.183505 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/43572253-c54e-4ba2-b805-7a9521a015cd-webhook-cert\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.183513 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw2wj\" (UniqueName: \"kubernetes.io/projected/43572253-c54e-4ba2-b805-7a9521a015cd-kube-api-access-pw2wj\") pod \"metallb-operator-controller-manager-579cbb8d94-dxs7n\" (UID: \"43572253-c54e-4ba2-b805-7a9521a015cd\") " pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.211847 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.251945 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl"] Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.252661 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.260127 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-7w7p9" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.260224 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.260236 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.270344 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl"] Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.367697 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfbb9\" (UniqueName: \"kubernetes.io/projected/5406f1e2-94a8-4c3c-b154-e1448775314a-kube-api-access-qfbb9\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.367783 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5406f1e2-94a8-4c3c-b154-e1448775314a-webhook-cert\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.368705 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5406f1e2-94a8-4c3c-b154-e1448775314a-apiservice-cert\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.470987 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfbb9\" (UniqueName: \"kubernetes.io/projected/5406f1e2-94a8-4c3c-b154-e1448775314a-kube-api-access-qfbb9\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.471470 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5406f1e2-94a8-4c3c-b154-e1448775314a-webhook-cert\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.471614 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5406f1e2-94a8-4c3c-b154-e1448775314a-apiservice-cert\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.478782 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5406f1e2-94a8-4c3c-b154-e1448775314a-apiservice-cert\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.482485 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5406f1e2-94a8-4c3c-b154-e1448775314a-webhook-cert\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.489375 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfbb9\" (UniqueName: \"kubernetes.io/projected/5406f1e2-94a8-4c3c-b154-e1448775314a-kube-api-access-qfbb9\") pod \"metallb-operator-webhook-server-6dfb784c-hjvnl\" (UID: \"5406f1e2-94a8-4c3c-b154-e1448775314a\") " pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.581443 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.596351 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n"] Dec 03 05:54:02 crc kubenswrapper[4810]: I1203 05:54:02.905790 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" event={"ID":"43572253-c54e-4ba2-b805-7a9521a015cd","Type":"ContainerStarted","Data":"cfdceee4d1f70a4120393d3defe58cf5b5290b989d31c3ab3914b7b6a962f397"} Dec 03 05:54:03 crc kubenswrapper[4810]: W1203 05:54:03.051599 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5406f1e2_94a8_4c3c_b154_e1448775314a.slice/crio-dcaf71cd7249e2b80196fc9cd0f5c1c485648516695296c1674d78581edd556f WatchSource:0}: Error finding container dcaf71cd7249e2b80196fc9cd0f5c1c485648516695296c1674d78581edd556f: Status 404 returned error can't find the container with id dcaf71cd7249e2b80196fc9cd0f5c1c485648516695296c1674d78581edd556f Dec 03 05:54:03 crc kubenswrapper[4810]: I1203 05:54:03.054290 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl"] Dec 03 05:54:03 crc kubenswrapper[4810]: I1203 05:54:03.914217 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" event={"ID":"5406f1e2-94a8-4c3c-b154-e1448775314a","Type":"ContainerStarted","Data":"dcaf71cd7249e2b80196fc9cd0f5c1c485648516695296c1674d78581edd556f"} Dec 03 05:54:03 crc kubenswrapper[4810]: I1203 05:54:03.943127 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v25sw" podStartSLOduration=4.77114531 podStartE2EDuration="9.943104329s" podCreationTimestamp="2025-12-03 05:53:54 +0000 UTC" firstStartedPulling="2025-12-03 05:53:55.862679245 +0000 UTC m=+759.798140116" lastFinishedPulling="2025-12-03 05:54:01.034638294 +0000 UTC m=+764.970099135" observedRunningTime="2025-12-03 05:54:03.938443219 +0000 UTC 
m=+767.873904060" watchObservedRunningTime="2025-12-03 05:54:03.943104329 +0000 UTC m=+767.878565190" Dec 03 05:54:04 crc kubenswrapper[4810]: I1203 05:54:04.618785 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:54:04 crc kubenswrapper[4810]: I1203 05:54:04.618852 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:54:05 crc kubenswrapper[4810]: I1203 05:54:05.689148 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-v25sw" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="registry-server" probeResult="failure" output=< Dec 03 05:54:05 crc kubenswrapper[4810]: timeout: failed to connect service ":50051" within 1s Dec 03 05:54:05 crc kubenswrapper[4810]: > Dec 03 05:54:10 crc kubenswrapper[4810]: I1203 05:54:10.960425 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" event={"ID":"43572253-c54e-4ba2-b805-7a9521a015cd","Type":"ContainerStarted","Data":"424a5035e2b84a9685aea348fc3f5af177b2f50929a9b5c4d457f9706de8feea"} Dec 03 05:54:10 crc kubenswrapper[4810]: I1203 05:54:10.961886 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:10 crc kubenswrapper[4810]: I1203 05:54:10.964300 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" event={"ID":"5406f1e2-94a8-4c3c-b154-e1448775314a","Type":"ContainerStarted","Data":"769498262d2d95f8b303aaaa38c8f0b523ed4e799fd3f80596b6a858a2703c09"} Dec 03 05:54:10 crc kubenswrapper[4810]: I1203 05:54:10.965099 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:10 crc kubenswrapper[4810]: I1203 05:54:10.985281 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" podStartSLOduration=2.23941546 podStartE2EDuration="9.985262842s" podCreationTimestamp="2025-12-03 05:54:01 +0000 UTC" firstStartedPulling="2025-12-03 05:54:02.621989426 +0000 UTC m=+766.557450267" lastFinishedPulling="2025-12-03 05:54:10.367836808 +0000 UTC m=+774.303297649" observedRunningTime="2025-12-03 05:54:10.98347925 +0000 UTC m=+774.918940101" watchObservedRunningTime="2025-12-03 05:54:10.985262842 +0000 UTC m=+774.920723693" Dec 03 05:54:11 crc kubenswrapper[4810]: I1203 05:54:11.010927 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" podStartSLOduration=1.685666396 podStartE2EDuration="9.010903137s" podCreationTimestamp="2025-12-03 05:54:02 +0000 UTC" firstStartedPulling="2025-12-03 05:54:03.057330005 +0000 UTC m=+766.992790846" lastFinishedPulling="2025-12-03 05:54:10.382566746 +0000 UTC m=+774.318027587" observedRunningTime="2025-12-03 05:54:11.00846753 +0000 UTC m=+774.943928371" watchObservedRunningTime="2025-12-03 05:54:11.010903137 +0000 UTC m=+774.946363978" Dec 03 05:54:14 crc kubenswrapper[4810]: I1203 05:54:14.662113 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:54:14 crc kubenswrapper[4810]: I1203 05:54:14.702056 4810 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.081846 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v25sw"] Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.084382 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v25sw" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="registry-server" containerID="cri-o://21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3" gracePeriod=2 Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.493270 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.599904 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-catalog-content\") pod \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.600000 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-utilities\") pod \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.600066 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vt9r\" (UniqueName: \"kubernetes.io/projected/9e856a4f-1c7e-4897-a853-bc80a9b9a659-kube-api-access-4vt9r\") pod \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\" (UID: \"9e856a4f-1c7e-4897-a853-bc80a9b9a659\") " Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.604920 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-utilities" (OuterVolumeSpecName: "utilities") pod "9e856a4f-1c7e-4897-a853-bc80a9b9a659" (UID: "9e856a4f-1c7e-4897-a853-bc80a9b9a659"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.607424 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e856a4f-1c7e-4897-a853-bc80a9b9a659-kube-api-access-4vt9r" (OuterVolumeSpecName: "kube-api-access-4vt9r") pod "9e856a4f-1c7e-4897-a853-bc80a9b9a659" (UID: "9e856a4f-1c7e-4897-a853-bc80a9b9a659"). InnerVolumeSpecName "kube-api-access-4vt9r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.703226 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.703444 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vt9r\" (UniqueName: \"kubernetes.io/projected/9e856a4f-1c7e-4897-a853-bc80a9b9a659-kube-api-access-4vt9r\") on node \"crc\" DevicePath \"\"" Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.736292 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9e856a4f-1c7e-4897-a853-bc80a9b9a659" (UID: "9e856a4f-1c7e-4897-a853-bc80a9b9a659"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:54:17 crc kubenswrapper[4810]: I1203 05:54:17.804618 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e856a4f-1c7e-4897-a853-bc80a9b9a659-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.008193 4810 generic.go:334] "Generic (PLEG): container finished" podID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerID="21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3" exitCode=0 Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.008246 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v25sw" event={"ID":"9e856a4f-1c7e-4897-a853-bc80a9b9a659","Type":"ContainerDied","Data":"21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3"} Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.008274 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v25sw" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.008294 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v25sw" event={"ID":"9e856a4f-1c7e-4897-a853-bc80a9b9a659","Type":"ContainerDied","Data":"0c83b6b0011b123a0ae70c06463cb9553565c413000c6c425d091ddf3f28b332"} Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.008318 4810 scope.go:117] "RemoveContainer" containerID="21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.023243 4810 scope.go:117] "RemoveContainer" containerID="d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.037446 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v25sw"] Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.041961 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v25sw"] Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.059855 4810 scope.go:117] "RemoveContainer" containerID="1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.075600 4810 scope.go:117] "RemoveContainer" containerID="21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3" Dec 03 05:54:18 crc kubenswrapper[4810]: E1203 05:54:18.076199 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3\": container with ID starting with 21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3 not found: ID does not exist" containerID="21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.076237 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3"} err="failed to get container status \"21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3\": rpc error: code = NotFound desc = could not find container \"21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3\": container with ID starting with 21e31f9053d0a0ef3c86b1f5e53b8636054ef4634a74c519e8559a7e639755d3 not found: ID does not exist" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.076267 4810 scope.go:117] "RemoveContainer" containerID="d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f" Dec 03 05:54:18 crc kubenswrapper[4810]: E1203 05:54:18.076573 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f\": container with ID starting with d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f not found: ID does not exist" containerID="d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.076608 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f"} err="failed to get container status \"d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f\": rpc error: code = NotFound desc = could not find container 
\"d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f\": container with ID starting with d2fea7f45d5a51a8f7dababa6039444965d1497d72ae33ec9947bd083184574f not found: ID does not exist" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.076627 4810 scope.go:117] "RemoveContainer" containerID="1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea" Dec 03 05:54:18 crc kubenswrapper[4810]: E1203 05:54:18.076986 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea\": container with ID starting with 1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea not found: ID does not exist" containerID="1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.077024 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea"} err="failed to get container status \"1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea\": rpc error: code = NotFound desc = could not find container \"1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea\": container with ID starting with 1eb80d603537c66a6437c05789697886535740cbc9626bc43e7e0c32751bacea not found: ID does not exist" Dec 03 05:54:18 crc kubenswrapper[4810]: I1203 05:54:18.384090 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" path="/var/lib/kubelet/pods/9e856a4f-1c7e-4897-a853-bc80a9b9a659/volumes" Dec 03 05:54:22 crc kubenswrapper[4810]: I1203 05:54:22.586604 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6dfb784c-hjvnl" Dec 03 05:54:25 crc kubenswrapper[4810]: I1203 05:54:25.677259 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:54:25 crc kubenswrapper[4810]: I1203 05:54:25.677881 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:54:25 crc kubenswrapper[4810]: I1203 05:54:25.677940 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:54:25 crc kubenswrapper[4810]: I1203 05:54:25.678503 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"40837cb8664b98412109347434fe923106e68d7421229ce0ff512909a9e08061"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 05:54:25 crc kubenswrapper[4810]: I1203 05:54:25.678555 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" 
containerName="machine-config-daemon" containerID="cri-o://40837cb8664b98412109347434fe923106e68d7421229ce0ff512909a9e08061" gracePeriod=600 Dec 03 05:54:26 crc kubenswrapper[4810]: I1203 05:54:26.055696 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="40837cb8664b98412109347434fe923106e68d7421229ce0ff512909a9e08061" exitCode=0 Dec 03 05:54:26 crc kubenswrapper[4810]: I1203 05:54:26.055766 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"40837cb8664b98412109347434fe923106e68d7421229ce0ff512909a9e08061"} Dec 03 05:54:26 crc kubenswrapper[4810]: I1203 05:54:26.056053 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"54276ea7f067e034e52d0f67559acc42b63d689d697478d16f1565a902279985"} Dec 03 05:54:26 crc kubenswrapper[4810]: I1203 05:54:26.056072 4810 scope.go:117] "RemoveContainer" containerID="79f862664f90d390c6e62915bb612e84b518e4e80e7a906eba37def23e22efe1" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.491636 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5xcsg"] Dec 03 05:54:40 crc kubenswrapper[4810]: E1203 05:54:40.492985 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="registry-server" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.493009 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="registry-server" Dec 03 05:54:40 crc kubenswrapper[4810]: E1203 05:54:40.493063 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="extract-utilities" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.493077 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="extract-utilities" Dec 03 05:54:40 crc kubenswrapper[4810]: E1203 05:54:40.493099 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="extract-content" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.493110 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="extract-content" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.493708 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e856a4f-1c7e-4897-a853-bc80a9b9a659" containerName="registry-server" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.498788 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.514823 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5xcsg"] Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.671701 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21d02e76-e70c-46ad-848f-bfdee9c9f81f-utilities\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.671858 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2tvs\" (UniqueName: \"kubernetes.io/projected/21d02e76-e70c-46ad-848f-bfdee9c9f81f-kube-api-access-v2tvs\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.671984 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21d02e76-e70c-46ad-848f-bfdee9c9f81f-catalog-content\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.773258 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2tvs\" (UniqueName: \"kubernetes.io/projected/21d02e76-e70c-46ad-848f-bfdee9c9f81f-kube-api-access-v2tvs\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.773319 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21d02e76-e70c-46ad-848f-bfdee9c9f81f-catalog-content\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.773407 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21d02e76-e70c-46ad-848f-bfdee9c9f81f-utilities\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.773870 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21d02e76-e70c-46ad-848f-bfdee9c9f81f-utilities\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.773962 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21d02e76-e70c-46ad-848f-bfdee9c9f81f-catalog-content\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.801273 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v2tvs\" (UniqueName: \"kubernetes.io/projected/21d02e76-e70c-46ad-848f-bfdee9c9f81f-kube-api-access-v2tvs\") pod \"community-operators-5xcsg\" (UID: \"21d02e76-e70c-46ad-848f-bfdee9c9f81f\") " pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:40 crc kubenswrapper[4810]: I1203 05:54:40.874880 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:54:41 crc kubenswrapper[4810]: W1203 05:54:41.342769 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21d02e76_e70c_46ad_848f_bfdee9c9f81f.slice/crio-a9dda574726876224144e5ba9fe74feb772e291524e38b803b49ab4af4d8edf9 WatchSource:0}: Error finding container a9dda574726876224144e5ba9fe74feb772e291524e38b803b49ab4af4d8edf9: Status 404 returned error can't find the container with id a9dda574726876224144e5ba9fe74feb772e291524e38b803b49ab4af4d8edf9 Dec 03 05:54:41 crc kubenswrapper[4810]: I1203 05:54:41.355354 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5xcsg"] Dec 03 05:54:42 crc kubenswrapper[4810]: I1203 05:54:42.161740 4810 generic.go:334] "Generic (PLEG): container finished" podID="21d02e76-e70c-46ad-848f-bfdee9c9f81f" containerID="8e9914d4a3084f1df031ce200dbf0defff4232f6fc136e776ee6f71b971e43cb" exitCode=0 Dec 03 05:54:42 crc kubenswrapper[4810]: I1203 05:54:42.161831 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xcsg" event={"ID":"21d02e76-e70c-46ad-848f-bfdee9c9f81f","Type":"ContainerDied","Data":"8e9914d4a3084f1df031ce200dbf0defff4232f6fc136e776ee6f71b971e43cb"} Dec 03 05:54:42 crc kubenswrapper[4810]: I1203 05:54:42.162153 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xcsg" event={"ID":"21d02e76-e70c-46ad-848f-bfdee9c9f81f","Type":"ContainerStarted","Data":"a9dda574726876224144e5ba9fe74feb772e291524e38b803b49ab4af4d8edf9"} Dec 03 05:54:42 crc kubenswrapper[4810]: I1203 05:54:42.220548 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-579cbb8d94-dxs7n" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.040147 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-klmlk"] Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.043304 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.043741 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx"] Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.044586 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.047525 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-zt7q6" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.047686 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.047698 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.048586 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.052593 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx"] Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.145718 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-hpcxn"] Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.147261 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.149963 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.150057 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.150184 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.151491 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-5s57c" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.158208 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-6nc6q"] Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.159101 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.161481 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.179146 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-6nc6q"] Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.201779 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/861b1b72-05ca-4e91-a015-64939a072ec2-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-5j7rx\" (UID: \"861b1b72-05ca-4e91-a015-64939a072ec2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.201826 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhk4d\" (UniqueName: \"kubernetes.io/projected/861b1b72-05ca-4e91-a015-64939a072ec2-kube-api-access-nhk4d\") pod \"frr-k8s-webhook-server-7fcb986d4-5j7rx\" (UID: \"861b1b72-05ca-4e91-a015-64939a072ec2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.201858 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-conf\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.201876 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-sockets\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.201901 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-startup\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.201933 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-reloader\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.201961 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-metrics\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.201991 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/953ec342-87c7-4803-96cf-bf3e6e4592aa-metrics-certs\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: 
I1203 05:54:43.202015 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82btl\" (UniqueName: \"kubernetes.io/projected/953ec342-87c7-4803-96cf-bf3e6e4592aa-kube-api-access-82btl\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.303753 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/953ec342-87c7-4803-96cf-bf3e6e4592aa-metrics-certs\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.303838 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.303868 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/edef3e76-a314-4543-9a0b-592d76cbed2a-metrics-certs\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.303893 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82btl\" (UniqueName: \"kubernetes.io/projected/953ec342-87c7-4803-96cf-bf3e6e4592aa-kube-api-access-82btl\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.303916 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtc4v\" (UniqueName: \"kubernetes.io/projected/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-kube-api-access-gtc4v\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.303944 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4gvf\" (UniqueName: \"kubernetes.io/projected/edef3e76-a314-4543-9a0b-592d76cbed2a-kube-api-access-z4gvf\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.303976 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/861b1b72-05ca-4e91-a015-64939a072ec2-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-5j7rx\" (UID: \"861b1b72-05ca-4e91-a015-64939a072ec2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304000 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhk4d\" (UniqueName: \"kubernetes.io/projected/861b1b72-05ca-4e91-a015-64939a072ec2-kube-api-access-nhk4d\") pod \"frr-k8s-webhook-server-7fcb986d4-5j7rx\" (UID: \"861b1b72-05ca-4e91-a015-64939a072ec2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 
05:54:43.304029 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-conf\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304065 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-sockets\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304101 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/edef3e76-a314-4543-9a0b-592d76cbed2a-cert\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304122 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-startup\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304158 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-reloader\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304180 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-metallb-excludel2\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304207 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-metrics-certs\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304231 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-metrics\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.304916 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-metrics\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.305594 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-conf\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " 
pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.305928 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-reloader\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.306191 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-sockets\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.306868 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/953ec342-87c7-4803-96cf-bf3e6e4592aa-frr-startup\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.315656 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/953ec342-87c7-4803-96cf-bf3e6e4592aa-metrics-certs\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.327380 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/861b1b72-05ca-4e91-a015-64939a072ec2-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-5j7rx\" (UID: \"861b1b72-05ca-4e91-a015-64939a072ec2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.327475 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82btl\" (UniqueName: \"kubernetes.io/projected/953ec342-87c7-4803-96cf-bf3e6e4592aa-kube-api-access-82btl\") pod \"frr-k8s-klmlk\" (UID: \"953ec342-87c7-4803-96cf-bf3e6e4592aa\") " pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.331691 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhk4d\" (UniqueName: \"kubernetes.io/projected/861b1b72-05ca-4e91-a015-64939a072ec2-kube-api-access-nhk4d\") pod \"frr-k8s-webhook-server-7fcb986d4-5j7rx\" (UID: \"861b1b72-05ca-4e91-a015-64939a072ec2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.381044 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.389032 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.405610 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/edef3e76-a314-4543-9a0b-592d76cbed2a-cert\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.405678 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-metallb-excludel2\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.405710 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-metrics-certs\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.405774 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.405793 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/edef3e76-a314-4543-9a0b-592d76cbed2a-metrics-certs\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.405815 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtc4v\" (UniqueName: \"kubernetes.io/projected/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-kube-api-access-gtc4v\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.405835 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4gvf\" (UniqueName: \"kubernetes.io/projected/edef3e76-a314-4543-9a0b-592d76cbed2a-kube-api-access-z4gvf\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: E1203 05:54:43.405964 4810 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 05:54:43 crc kubenswrapper[4810]: E1203 05:54:43.406067 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist podName:1bf9e275-7fc1-43d1-b95a-b19e459fda0c nodeName:}" failed. No retries permitted until 2025-12-03 05:54:43.906043004 +0000 UTC m=+807.841503845 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist") pod "speaker-hpcxn" (UID: "1bf9e275-7fc1-43d1-b95a-b19e459fda0c") : secret "metallb-memberlist" not found Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.406921 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-metallb-excludel2\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.409375 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/edef3e76-a314-4543-9a0b-592d76cbed2a-metrics-certs\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.410058 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-metrics-certs\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.410508 4810 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.425641 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4gvf\" (UniqueName: \"kubernetes.io/projected/edef3e76-a314-4543-9a0b-592d76cbed2a-kube-api-access-z4gvf\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.426692 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/edef3e76-a314-4543-9a0b-592d76cbed2a-cert\") pod \"controller-f8648f98b-6nc6q\" (UID: \"edef3e76-a314-4543-9a0b-592d76cbed2a\") " pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.428897 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtc4v\" (UniqueName: \"kubernetes.io/projected/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-kube-api-access-gtc4v\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.478288 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.814851 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx"] Dec 03 05:54:43 crc kubenswrapper[4810]: W1203 05:54:43.823465 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod861b1b72_05ca_4e91_a015_64939a072ec2.slice/crio-54c22187bab77b67298a1b71e71019d506831de12a396b16a90784f96c1a258a WatchSource:0}: Error finding container 54c22187bab77b67298a1b71e71019d506831de12a396b16a90784f96c1a258a: Status 404 returned error can't find the container with id 54c22187bab77b67298a1b71e71019d506831de12a396b16a90784f96c1a258a Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.895921 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-6nc6q"] Dec 03 05:54:43 crc kubenswrapper[4810]: W1203 05:54:43.897772 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedef3e76_a314_4543_9a0b_592d76cbed2a.slice/crio-4ce0c4c8acee20f3efc3a026389756f60e087d434c8d3ab0a4b3bef18ede121d WatchSource:0}: Error finding container 4ce0c4c8acee20f3efc3a026389756f60e087d434c8d3ab0a4b3bef18ede121d: Status 404 returned error can't find the container with id 4ce0c4c8acee20f3efc3a026389756f60e087d434c8d3ab0a4b3bef18ede121d Dec 03 05:54:43 crc kubenswrapper[4810]: I1203 05:54:43.912259 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:43 crc kubenswrapper[4810]: E1203 05:54:43.912404 4810 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 05:54:43 crc kubenswrapper[4810]: E1203 05:54:43.912453 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist podName:1bf9e275-7fc1-43d1-b95a-b19e459fda0c nodeName:}" failed. No retries permitted until 2025-12-03 05:54:44.912437535 +0000 UTC m=+808.847898376 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist") pod "speaker-hpcxn" (UID: "1bf9e275-7fc1-43d1-b95a-b19e459fda0c") : secret "metallb-memberlist" not found Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.176441 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" event={"ID":"861b1b72-05ca-4e91-a015-64939a072ec2","Type":"ContainerStarted","Data":"54c22187bab77b67298a1b71e71019d506831de12a396b16a90784f96c1a258a"} Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.179090 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-6nc6q" event={"ID":"edef3e76-a314-4543-9a0b-592d76cbed2a","Type":"ContainerStarted","Data":"860d18b9f87f0c64542d7d618a68b99d1534621214ad89172c19d8b7ef6b95ee"} Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.179127 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-6nc6q" event={"ID":"edef3e76-a314-4543-9a0b-592d76cbed2a","Type":"ContainerStarted","Data":"ed06b6bc5ee73ddb5671a02ec8e8dc77fba8459dc3027c93d1bf934c9d9ebdb4"} Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.179139 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-6nc6q" event={"ID":"edef3e76-a314-4543-9a0b-592d76cbed2a","Type":"ContainerStarted","Data":"4ce0c4c8acee20f3efc3a026389756f60e087d434c8d3ab0a4b3bef18ede121d"} Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.179220 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.181382 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerStarted","Data":"4b404a6169275f1e0c423693f1b4109f1d70343f155bcff9548d42c1d4da735c"} Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.199064 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-6nc6q" podStartSLOduration=1.199039288 podStartE2EDuration="1.199039288s" podCreationTimestamp="2025-12-03 05:54:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:54:44.196946845 +0000 UTC m=+808.132407696" watchObservedRunningTime="2025-12-03 05:54:44.199039288 +0000 UTC m=+808.134500129" Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.946495 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.953057 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1bf9e275-7fc1-43d1-b95a-b19e459fda0c-memberlist\") pod \"speaker-hpcxn\" (UID: \"1bf9e275-7fc1-43d1-b95a-b19e459fda0c\") " pod="metallb-system/speaker-hpcxn" Dec 03 05:54:44 crc kubenswrapper[4810]: I1203 05:54:44.961791 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-hpcxn" Dec 03 05:54:44 crc kubenswrapper[4810]: W1203 05:54:44.989263 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1bf9e275_7fc1_43d1_b95a_b19e459fda0c.slice/crio-3cfb739c22cbfa1a95c6d8b2037efd3f164b23a7aa8d684bf12a5d1c9dc818cc WatchSource:0}: Error finding container 3cfb739c22cbfa1a95c6d8b2037efd3f164b23a7aa8d684bf12a5d1c9dc818cc: Status 404 returned error can't find the container with id 3cfb739c22cbfa1a95c6d8b2037efd3f164b23a7aa8d684bf12a5d1c9dc818cc Dec 03 05:54:45 crc kubenswrapper[4810]: I1203 05:54:45.197551 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hpcxn" event={"ID":"1bf9e275-7fc1-43d1-b95a-b19e459fda0c","Type":"ContainerStarted","Data":"3cfb739c22cbfa1a95c6d8b2037efd3f164b23a7aa8d684bf12a5d1c9dc818cc"} Dec 03 05:54:46 crc kubenswrapper[4810]: I1203 05:54:46.209811 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hpcxn" event={"ID":"1bf9e275-7fc1-43d1-b95a-b19e459fda0c","Type":"ContainerStarted","Data":"8e8888d46cf6f9c4b473f6850b9310234229d17c0c34fb183730dd67ddeecc66"} Dec 03 05:54:46 crc kubenswrapper[4810]: I1203 05:54:46.209863 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hpcxn" event={"ID":"1bf9e275-7fc1-43d1-b95a-b19e459fda0c","Type":"ContainerStarted","Data":"4203c35cfb49f6c0963cc8312fefbd81298ae1b436b580ced4adbdf007c13649"} Dec 03 05:54:46 crc kubenswrapper[4810]: I1203 05:54:46.210184 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-hpcxn" Dec 03 05:54:46 crc kubenswrapper[4810]: I1203 05:54:46.237268 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-hpcxn" podStartSLOduration=3.237241315 podStartE2EDuration="3.237241315s" podCreationTimestamp="2025-12-03 05:54:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:54:46.231724485 +0000 UTC m=+810.167185336" watchObservedRunningTime="2025-12-03 05:54:46.237241315 +0000 UTC m=+810.172702176" Dec 03 05:54:53 crc kubenswrapper[4810]: I1203 05:54:53.303269 4810 generic.go:334] "Generic (PLEG): container finished" podID="21d02e76-e70c-46ad-848f-bfdee9c9f81f" containerID="4ea1b6678a0ae812bde362a44a28882d6e9ae85427f5cee5f881483114541b7f" exitCode=0 Dec 03 05:54:53 crc kubenswrapper[4810]: I1203 05:54:53.303373 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xcsg" event={"ID":"21d02e76-e70c-46ad-848f-bfdee9c9f81f","Type":"ContainerDied","Data":"4ea1b6678a0ae812bde362a44a28882d6e9ae85427f5cee5f881483114541b7f"} Dec 03 05:54:53 crc kubenswrapper[4810]: I1203 05:54:53.307042 4810 generic.go:334] "Generic (PLEG): container finished" podID="953ec342-87c7-4803-96cf-bf3e6e4592aa" containerID="9d8f20169693a0077d408d74fbd86a10c979eeff3f387b67eaca8ef86fa4d9c5" exitCode=0 Dec 03 05:54:53 crc kubenswrapper[4810]: I1203 05:54:53.307188 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerDied","Data":"9d8f20169693a0077d408d74fbd86a10c979eeff3f387b67eaca8ef86fa4d9c5"} Dec 03 05:54:53 crc kubenswrapper[4810]: I1203 05:54:53.315207 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" 
event={"ID":"861b1b72-05ca-4e91-a015-64939a072ec2","Type":"ContainerStarted","Data":"90aa79517d06cadff258bec86462745d59d117a122a616dddcb933b611572b6f"} Dec 03 05:54:53 crc kubenswrapper[4810]: I1203 05:54:53.315500 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:54:53 crc kubenswrapper[4810]: I1203 05:54:53.392958 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" podStartSLOduration=1.7828182849999998 podStartE2EDuration="10.392937177s" podCreationTimestamp="2025-12-03 05:54:43 +0000 UTC" firstStartedPulling="2025-12-03 05:54:43.826166576 +0000 UTC m=+807.761627417" lastFinishedPulling="2025-12-03 05:54:52.436285468 +0000 UTC m=+816.371746309" observedRunningTime="2025-12-03 05:54:53.391776288 +0000 UTC m=+817.327237129" watchObservedRunningTime="2025-12-03 05:54:53.392937177 +0000 UTC m=+817.328398018" Dec 03 05:54:54 crc kubenswrapper[4810]: I1203 05:54:54.325596 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xcsg" event={"ID":"21d02e76-e70c-46ad-848f-bfdee9c9f81f","Type":"ContainerStarted","Data":"95fbd7c375fee2888020e56d27947971a913009bb663bd55bcb0e519a3f3c3b8"} Dec 03 05:54:54 crc kubenswrapper[4810]: I1203 05:54:54.327951 4810 generic.go:334] "Generic (PLEG): container finished" podID="953ec342-87c7-4803-96cf-bf3e6e4592aa" containerID="8dfa80e8b52ccbf17f56d2f7ce963268e03d2c5bfada3f9c030956bd07740593" exitCode=0 Dec 03 05:54:54 crc kubenswrapper[4810]: I1203 05:54:54.327996 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerDied","Data":"8dfa80e8b52ccbf17f56d2f7ce963268e03d2c5bfada3f9c030956bd07740593"} Dec 03 05:54:54 crc kubenswrapper[4810]: I1203 05:54:54.350924 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5xcsg" podStartSLOduration=2.7487125519999998 podStartE2EDuration="14.35090404s" podCreationTimestamp="2025-12-03 05:54:40 +0000 UTC" firstStartedPulling="2025-12-03 05:54:42.165025868 +0000 UTC m=+806.100486709" lastFinishedPulling="2025-12-03 05:54:53.767217326 +0000 UTC m=+817.702678197" observedRunningTime="2025-12-03 05:54:54.345842951 +0000 UTC m=+818.281303812" watchObservedRunningTime="2025-12-03 05:54:54.35090404 +0000 UTC m=+818.286364871" Dec 03 05:54:55 crc kubenswrapper[4810]: I1203 05:54:55.336713 4810 generic.go:334] "Generic (PLEG): container finished" podID="953ec342-87c7-4803-96cf-bf3e6e4592aa" containerID="a7d9f8558ad220bc6cbdf0096d46ba65cc375a92ccdbe61e28102caefda8cbaf" exitCode=0 Dec 03 05:54:55 crc kubenswrapper[4810]: I1203 05:54:55.336843 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerDied","Data":"a7d9f8558ad220bc6cbdf0096d46ba65cc375a92ccdbe61e28102caefda8cbaf"} Dec 03 05:54:56 crc kubenswrapper[4810]: I1203 05:54:56.346311 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerStarted","Data":"4b5ab8e9514f1f189e400ab5e12dd434fe44b1c475cb30e6e7f14121dd8f6b14"} Dec 03 05:54:56 crc kubenswrapper[4810]: I1203 05:54:56.346832 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" 
event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerStarted","Data":"81872ae5c7c99a2000af77d30b3568f6c41607ad2faa98f486ccf41efe6f62eb"} Dec 03 05:54:56 crc kubenswrapper[4810]: I1203 05:54:56.346850 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerStarted","Data":"68c84b9af505dd76988e8516b5c45a2af4494f34f5ade6e5ed05a7c1ce4526fc"} Dec 03 05:54:56 crc kubenswrapper[4810]: I1203 05:54:56.346865 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerStarted","Data":"eb4711af1f506d752cae1dfabef9b70deb68c82d21b9a516b35378adc4fefd8e"} Dec 03 05:54:56 crc kubenswrapper[4810]: I1203 05:54:56.346879 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerStarted","Data":"7d95f2dafaa890655b126af22d2ab1538c2409e3f14303d485f4697520131bb3"} Dec 03 05:54:57 crc kubenswrapper[4810]: I1203 05:54:57.357446 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-klmlk" event={"ID":"953ec342-87c7-4803-96cf-bf3e6e4592aa","Type":"ContainerStarted","Data":"5714ab683b724a1f1fa4b40fe85636d38eb284259225dc325c10bf7a7297b524"} Dec 03 05:54:57 crc kubenswrapper[4810]: I1203 05:54:57.359125 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:57 crc kubenswrapper[4810]: I1203 05:54:57.394507 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-klmlk" podStartSLOduration=5.738451304 podStartE2EDuration="14.394488122s" podCreationTimestamp="2025-12-03 05:54:43 +0000 UTC" firstStartedPulling="2025-12-03 05:54:43.755383169 +0000 UTC m=+807.690844010" lastFinishedPulling="2025-12-03 05:54:52.411419987 +0000 UTC m=+816.346880828" observedRunningTime="2025-12-03 05:54:57.391234159 +0000 UTC m=+821.326695020" watchObservedRunningTime="2025-12-03 05:54:57.394488122 +0000 UTC m=+821.329948953" Dec 03 05:54:58 crc kubenswrapper[4810]: I1203 05:54:58.390054 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-klmlk" Dec 03 05:54:58 crc kubenswrapper[4810]: I1203 05:54:58.416962 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-klmlk" Dec 03 05:55:00 crc kubenswrapper[4810]: I1203 05:55:00.875301 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:55:00 crc kubenswrapper[4810]: I1203 05:55:00.875652 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:55:00 crc kubenswrapper[4810]: I1203 05:55:00.941625 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:55:01 crc kubenswrapper[4810]: I1203 05:55:01.454303 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5xcsg" Dec 03 05:55:01 crc kubenswrapper[4810]: I1203 05:55:01.540418 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5xcsg"] Dec 03 05:55:01 crc kubenswrapper[4810]: I1203 05:55:01.598329 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-q46cp"] Dec 03 05:55:01 crc kubenswrapper[4810]: I1203 05:55:01.598611 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q46cp" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="registry-server" containerID="cri-o://6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde" gracePeriod=2 Dec 03 05:55:02 crc kubenswrapper[4810]: E1203 05:55:02.087383 4810 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde is running failed: container process not found" containerID="6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 05:55:02 crc kubenswrapper[4810]: E1203 05:55:02.087828 4810 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde is running failed: container process not found" containerID="6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 05:55:02 crc kubenswrapper[4810]: E1203 05:55:02.088220 4810 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde is running failed: container process not found" containerID="6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 05:55:02 crc kubenswrapper[4810]: E1203 05:55:02.088248 4810 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-q46cp" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="registry-server" Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.403651 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q46cp" event={"ID":"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7","Type":"ContainerDied","Data":"6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde"} Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.403746 4810 generic.go:334] "Generic (PLEG): container finished" podID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerID="6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde" exitCode=0 Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.528749 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.666318 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pkpm\" (UniqueName: \"kubernetes.io/projected/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-kube-api-access-6pkpm\") pod \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.666391 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-catalog-content\") pod \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.666461 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-utilities\") pod \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\" (UID: \"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7\") " Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.667451 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-utilities" (OuterVolumeSpecName: "utilities") pod "e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" (UID: "e34bac67-ab0b-44a1-9033-3f76e6f0bdb7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.678227 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-kube-api-access-6pkpm" (OuterVolumeSpecName: "kube-api-access-6pkpm") pod "e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" (UID: "e34bac67-ab0b-44a1-9033-3f76e6f0bdb7"). InnerVolumeSpecName "kube-api-access-6pkpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.724970 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" (UID: "e34bac67-ab0b-44a1-9033-3f76e6f0bdb7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.768532 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pkpm\" (UniqueName: \"kubernetes.io/projected/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-kube-api-access-6pkpm\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.768587 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:02 crc kubenswrapper[4810]: I1203 05:55:02.768601 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.396348 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5j7rx" Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.417548 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q46cp" Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.417716 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q46cp" event={"ID":"e34bac67-ab0b-44a1-9033-3f76e6f0bdb7","Type":"ContainerDied","Data":"a79e6a46c6ba2f1a0f5e3f0d4701895e2d838e8e661d19b768b44ef86af89e1b"} Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.417858 4810 scope.go:117] "RemoveContainer" containerID="6fc0dc4d7d5eeda12d51df41a71da80f7063c7c2c126227687c586e075bbebde" Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.454717 4810 scope.go:117] "RemoveContainer" containerID="677b5bb197788eef5cd3b8b0def924df49180ee0eaa4d4cebbd377c531079944" Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.462346 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q46cp"] Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.469733 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q46cp"] Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.482000 4810 scope.go:117] "RemoveContainer" containerID="ed2dd30de4f04eb3d9b1d63a2b21cf2c5f857ecccddfd0dc0f0fd2fdf056e93d" Dec 03 05:55:03 crc kubenswrapper[4810]: I1203 05:55:03.482445 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-6nc6q" Dec 03 05:55:04 crc kubenswrapper[4810]: I1203 05:55:04.392062 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" path="/var/lib/kubelet/pods/e34bac67-ab0b-44a1-9033-3f76e6f0bdb7/volumes" Dec 03 05:55:04 crc kubenswrapper[4810]: I1203 05:55:04.966393 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-hpcxn" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.948179 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-ttklk"] Dec 03 05:55:07 crc kubenswrapper[4810]: E1203 05:55:07.949759 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="extract-utilities" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.949862 4810 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="extract-utilities" Dec 03 05:55:07 crc kubenswrapper[4810]: E1203 05:55:07.949918 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="extract-content" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.949966 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="extract-content" Dec 03 05:55:07 crc kubenswrapper[4810]: E1203 05:55:07.950049 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="registry-server" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.950105 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="registry-server" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.950329 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="e34bac67-ab0b-44a1-9033-3f76e6f0bdb7" containerName="registry-server" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.950941 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ttklk" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.955173 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.956206 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-xjbk2" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.956331 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 03 05:55:07 crc kubenswrapper[4810]: I1203 05:55:07.969802 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ttklk"] Dec 03 05:55:08 crc kubenswrapper[4810]: I1203 05:55:08.050208 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxjhv\" (UniqueName: \"kubernetes.io/projected/ffce2bad-e568-4efc-a3e4-e2324deda647-kube-api-access-gxjhv\") pod \"openstack-operator-index-ttklk\" (UID: \"ffce2bad-e568-4efc-a3e4-e2324deda647\") " pod="openstack-operators/openstack-operator-index-ttklk" Dec 03 05:55:08 crc kubenswrapper[4810]: I1203 05:55:08.152221 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxjhv\" (UniqueName: \"kubernetes.io/projected/ffce2bad-e568-4efc-a3e4-e2324deda647-kube-api-access-gxjhv\") pod \"openstack-operator-index-ttklk\" (UID: \"ffce2bad-e568-4efc-a3e4-e2324deda647\") " pod="openstack-operators/openstack-operator-index-ttklk" Dec 03 05:55:08 crc kubenswrapper[4810]: I1203 05:55:08.172277 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxjhv\" (UniqueName: \"kubernetes.io/projected/ffce2bad-e568-4efc-a3e4-e2324deda647-kube-api-access-gxjhv\") pod \"openstack-operator-index-ttklk\" (UID: \"ffce2bad-e568-4efc-a3e4-e2324deda647\") " pod="openstack-operators/openstack-operator-index-ttklk" Dec 03 05:55:08 crc kubenswrapper[4810]: I1203 05:55:08.284908 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ttklk" Dec 03 05:55:08 crc kubenswrapper[4810]: I1203 05:55:08.518072 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ttklk"] Dec 03 05:55:09 crc kubenswrapper[4810]: I1203 05:55:09.487135 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ttklk" event={"ID":"ffce2bad-e568-4efc-a3e4-e2324deda647","Type":"ContainerStarted","Data":"b360079b6d62872d23a8f90e8c6300ced823e916198a76c1c0bf00a11641d7e0"} Dec 03 05:55:09 crc kubenswrapper[4810]: I1203 05:55:09.922188 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-ttklk"] Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.499285 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ttklk" event={"ID":"ffce2bad-e568-4efc-a3e4-e2324deda647","Type":"ContainerStarted","Data":"94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd"} Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.499481 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-ttklk" podUID="ffce2bad-e568-4efc-a3e4-e2324deda647" containerName="registry-server" containerID="cri-o://94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd" gracePeriod=2 Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.524639 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-ttklk" podStartSLOduration=1.901390973 podStartE2EDuration="3.524616198s" podCreationTimestamp="2025-12-03 05:55:07 +0000 UTC" firstStartedPulling="2025-12-03 05:55:08.528970852 +0000 UTC m=+832.464431693" lastFinishedPulling="2025-12-03 05:55:10.152196077 +0000 UTC m=+834.087656918" observedRunningTime="2025-12-03 05:55:10.517547299 +0000 UTC m=+834.453008160" watchObservedRunningTime="2025-12-03 05:55:10.524616198 +0000 UTC m=+834.460077039" Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.537774 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-mmn6k"] Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.538641 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.543421 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mmn6k"] Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.691771 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6cht\" (UniqueName: \"kubernetes.io/projected/6a2591e5-764f-4e99-90d5-c0942ee5c434-kube-api-access-f6cht\") pod \"openstack-operator-index-mmn6k\" (UID: \"6a2591e5-764f-4e99-90d5-c0942ee5c434\") " pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.793812 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6cht\" (UniqueName: \"kubernetes.io/projected/6a2591e5-764f-4e99-90d5-c0942ee5c434-kube-api-access-f6cht\") pod \"openstack-operator-index-mmn6k\" (UID: \"6a2591e5-764f-4e99-90d5-c0942ee5c434\") " pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.819459 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6cht\" (UniqueName: \"kubernetes.io/projected/6a2591e5-764f-4e99-90d5-c0942ee5c434-kube-api-access-f6cht\") pod \"openstack-operator-index-mmn6k\" (UID: \"6a2591e5-764f-4e99-90d5-c0942ee5c434\") " pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.889780 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.900811 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ttklk" Dec 03 05:55:10 crc kubenswrapper[4810]: I1203 05:55:10.996984 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxjhv\" (UniqueName: \"kubernetes.io/projected/ffce2bad-e568-4efc-a3e4-e2324deda647-kube-api-access-gxjhv\") pod \"ffce2bad-e568-4efc-a3e4-e2324deda647\" (UID: \"ffce2bad-e568-4efc-a3e4-e2324deda647\") " Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.006275 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffce2bad-e568-4efc-a3e4-e2324deda647-kube-api-access-gxjhv" (OuterVolumeSpecName: "kube-api-access-gxjhv") pod "ffce2bad-e568-4efc-a3e4-e2324deda647" (UID: "ffce2bad-e568-4efc-a3e4-e2324deda647"). InnerVolumeSpecName "kube-api-access-gxjhv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.099222 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxjhv\" (UniqueName: \"kubernetes.io/projected/ffce2bad-e568-4efc-a3e4-e2324deda647-kube-api-access-gxjhv\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.135276 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mmn6k"] Dec 03 05:55:11 crc kubenswrapper[4810]: W1203 05:55:11.147471 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a2591e5_764f_4e99_90d5_c0942ee5c434.slice/crio-fc3f91a21d1fb108db882e276dec2dd74e4c1298bf60387d6edbeb22ce26f05f WatchSource:0}: Error finding container fc3f91a21d1fb108db882e276dec2dd74e4c1298bf60387d6edbeb22ce26f05f: Status 404 returned error can't find the container with id fc3f91a21d1fb108db882e276dec2dd74e4c1298bf60387d6edbeb22ce26f05f Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.508879 4810 generic.go:334] "Generic (PLEG): container finished" podID="ffce2bad-e568-4efc-a3e4-e2324deda647" containerID="94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd" exitCode=0 Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.509016 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ttklk" event={"ID":"ffce2bad-e568-4efc-a3e4-e2324deda647","Type":"ContainerDied","Data":"94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd"} Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.509148 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ttklk" event={"ID":"ffce2bad-e568-4efc-a3e4-e2324deda647","Type":"ContainerDied","Data":"b360079b6d62872d23a8f90e8c6300ced823e916198a76c1c0bf00a11641d7e0"} Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.509175 4810 scope.go:117] "RemoveContainer" containerID="94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd" Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.509012 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ttklk" Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.516291 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mmn6k" event={"ID":"6a2591e5-764f-4e99-90d5-c0942ee5c434","Type":"ContainerStarted","Data":"fc3f91a21d1fb108db882e276dec2dd74e4c1298bf60387d6edbeb22ce26f05f"} Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.552421 4810 scope.go:117] "RemoveContainer" containerID="94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd" Dec 03 05:55:11 crc kubenswrapper[4810]: E1203 05:55:11.554082 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd\": container with ID starting with 94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd not found: ID does not exist" containerID="94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd" Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.554186 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd"} err="failed to get container status \"94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd\": rpc error: code = NotFound desc = could not find container \"94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd\": container with ID starting with 94d7854748331f960905059694005ecfe539e13d4f764a5acaf93437b7f765bd not found: ID does not exist" Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.571376 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-ttklk"] Dec 03 05:55:11 crc kubenswrapper[4810]: I1203 05:55:11.582660 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-ttklk"] Dec 03 05:55:12 crc kubenswrapper[4810]: I1203 05:55:12.389114 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffce2bad-e568-4efc-a3e4-e2324deda647" path="/var/lib/kubelet/pods/ffce2bad-e568-4efc-a3e4-e2324deda647/volumes" Dec 03 05:55:12 crc kubenswrapper[4810]: I1203 05:55:12.531802 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mmn6k" event={"ID":"6a2591e5-764f-4e99-90d5-c0942ee5c434","Type":"ContainerStarted","Data":"e628c7f3072adea9e44a5f3910698041b4998ae05182f2384471fb8a0b109088"} Dec 03 05:55:12 crc kubenswrapper[4810]: I1203 05:55:12.561177 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-mmn6k" podStartSLOduration=2.133856619 podStartE2EDuration="2.561148134s" podCreationTimestamp="2025-12-03 05:55:10 +0000 UTC" firstStartedPulling="2025-12-03 05:55:11.152068202 +0000 UTC m=+835.087529043" lastFinishedPulling="2025-12-03 05:55:11.579359677 +0000 UTC m=+835.514820558" observedRunningTime="2025-12-03 05:55:12.552576376 +0000 UTC m=+836.488037247" watchObservedRunningTime="2025-12-03 05:55:12.561148134 +0000 UTC m=+836.496609005" Dec 03 05:55:13 crc kubenswrapper[4810]: I1203 05:55:13.388196 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-klmlk" Dec 03 05:55:20 crc kubenswrapper[4810]: I1203 05:55:20.890548 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:20 crc kubenswrapper[4810]: I1203 05:55:20.891815 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:20 crc kubenswrapper[4810]: I1203 05:55:20.943182 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:21 crc kubenswrapper[4810]: I1203 05:55:21.057331 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-mmn6k" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.790893 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r"] Dec 03 05:55:23 crc kubenswrapper[4810]: E1203 05:55:23.792304 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffce2bad-e568-4efc-a3e4-e2324deda647" containerName="registry-server" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.792329 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffce2bad-e568-4efc-a3e4-e2324deda647" containerName="registry-server" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.792571 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffce2bad-e568-4efc-a3e4-e2324deda647" containerName="registry-server" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.794387 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.799229 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-8b9kj" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.800202 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r"] Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.866831 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-bundle\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.866949 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-util\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.866978 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf9c6\" (UniqueName: \"kubernetes.io/projected/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-kube-api-access-mf9c6\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 
05:55:23.967697 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-bundle\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.967900 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-util\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.967942 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf9c6\" (UniqueName: \"kubernetes.io/projected/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-kube-api-access-mf9c6\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.968380 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-bundle\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:23 crc kubenswrapper[4810]: I1203 05:55:23.968442 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-util\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:24 crc kubenswrapper[4810]: I1203 05:55:24.001782 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf9c6\" (UniqueName: \"kubernetes.io/projected/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-kube-api-access-mf9c6\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:24 crc kubenswrapper[4810]: I1203 05:55:24.155941 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:24 crc kubenswrapper[4810]: I1203 05:55:24.616944 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r"] Dec 03 05:55:25 crc kubenswrapper[4810]: I1203 05:55:25.040322 4810 generic.go:334] "Generic (PLEG): container finished" podID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerID="eb65408e1b090c8eae397ff322415d70b8a6470f27b98ae94b3142854c6e202e" exitCode=0 Dec 03 05:55:25 crc kubenswrapper[4810]: I1203 05:55:25.040404 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" event={"ID":"6e9cc58b-08e2-4bf4-be95-0b3c437559a1","Type":"ContainerDied","Data":"eb65408e1b090c8eae397ff322415d70b8a6470f27b98ae94b3142854c6e202e"} Dec 03 05:55:25 crc kubenswrapper[4810]: I1203 05:55:25.041576 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" event={"ID":"6e9cc58b-08e2-4bf4-be95-0b3c437559a1","Type":"ContainerStarted","Data":"2cdf32e2ad4db3a3eccd0e6f45280450309b16568b048c83a6dedfad9d965a94"} Dec 03 05:55:26 crc kubenswrapper[4810]: I1203 05:55:26.050332 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" event={"ID":"6e9cc58b-08e2-4bf4-be95-0b3c437559a1","Type":"ContainerStarted","Data":"b4ec56a83a85a3b04e248af45d6d78b21e7d2d515142e09197afbdfc3e89ac38"} Dec 03 05:55:27 crc kubenswrapper[4810]: I1203 05:55:27.059866 4810 generic.go:334] "Generic (PLEG): container finished" podID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerID="b4ec56a83a85a3b04e248af45d6d78b21e7d2d515142e09197afbdfc3e89ac38" exitCode=0 Dec 03 05:55:27 crc kubenswrapper[4810]: I1203 05:55:27.059923 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" event={"ID":"6e9cc58b-08e2-4bf4-be95-0b3c437559a1","Type":"ContainerDied","Data":"b4ec56a83a85a3b04e248af45d6d78b21e7d2d515142e09197afbdfc3e89ac38"} Dec 03 05:55:28 crc kubenswrapper[4810]: I1203 05:55:28.070020 4810 generic.go:334] "Generic (PLEG): container finished" podID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerID="8090f5b9a6c8d6d518aa51e54d19266910e10890bd50e03a13b0140fb7bf00cb" exitCode=0 Dec 03 05:55:28 crc kubenswrapper[4810]: I1203 05:55:28.070067 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" event={"ID":"6e9cc58b-08e2-4bf4-be95-0b3c437559a1","Type":"ContainerDied","Data":"8090f5b9a6c8d6d518aa51e54d19266910e10890bd50e03a13b0140fb7bf00cb"} Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.418674 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.557983 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-util\") pod \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.558124 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-bundle\") pod \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.558242 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf9c6\" (UniqueName: \"kubernetes.io/projected/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-kube-api-access-mf9c6\") pod \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\" (UID: \"6e9cc58b-08e2-4bf4-be95-0b3c437559a1\") " Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.558951 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-bundle" (OuterVolumeSpecName: "bundle") pod "6e9cc58b-08e2-4bf4-be95-0b3c437559a1" (UID: "6e9cc58b-08e2-4bf4-be95-0b3c437559a1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.568184 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-kube-api-access-mf9c6" (OuterVolumeSpecName: "kube-api-access-mf9c6") pod "6e9cc58b-08e2-4bf4-be95-0b3c437559a1" (UID: "6e9cc58b-08e2-4bf4-be95-0b3c437559a1"). InnerVolumeSpecName "kube-api-access-mf9c6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.578161 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-util" (OuterVolumeSpecName: "util") pod "6e9cc58b-08e2-4bf4-be95-0b3c437559a1" (UID: "6e9cc58b-08e2-4bf4-be95-0b3c437559a1"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.660697 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.660793 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf9c6\" (UniqueName: \"kubernetes.io/projected/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-kube-api-access-mf9c6\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:29 crc kubenswrapper[4810]: I1203 05:55:29.660817 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e9cc58b-08e2-4bf4-be95-0b3c437559a1-util\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:30 crc kubenswrapper[4810]: I1203 05:55:30.088620 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" event={"ID":"6e9cc58b-08e2-4bf4-be95-0b3c437559a1","Type":"ContainerDied","Data":"2cdf32e2ad4db3a3eccd0e6f45280450309b16568b048c83a6dedfad9d965a94"} Dec 03 05:55:30 crc kubenswrapper[4810]: I1203 05:55:30.088672 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cdf32e2ad4db3a3eccd0e6f45280450309b16568b048c83a6dedfad9d965a94" Dec 03 05:55:30 crc kubenswrapper[4810]: I1203 05:55:30.088685 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.264105 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jkg24"] Dec 03 05:55:32 crc kubenswrapper[4810]: E1203 05:55:32.264370 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerName="pull" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.264385 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerName="pull" Dec 03 05:55:32 crc kubenswrapper[4810]: E1203 05:55:32.264398 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerName="util" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.264404 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerName="util" Dec 03 05:55:32 crc kubenswrapper[4810]: E1203 05:55:32.264417 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerName="extract" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.264422 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerName="extract" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.264537 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e9cc58b-08e2-4bf4-be95-0b3c437559a1" containerName="extract" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.265348 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.285117 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jkg24"] Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.409664 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxvmg\" (UniqueName: \"kubernetes.io/projected/c388cef9-08df-4344-872e-b8bdfa11b4fc-kube-api-access-wxvmg\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.409877 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-catalog-content\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.409979 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-utilities\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.511356 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-utilities\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.511459 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxvmg\" (UniqueName: \"kubernetes.io/projected/c388cef9-08df-4344-872e-b8bdfa11b4fc-kube-api-access-wxvmg\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.511528 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-catalog-content\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.512123 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-utilities\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.512385 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-catalog-content\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.531764 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wxvmg\" (UniqueName: \"kubernetes.io/projected/c388cef9-08df-4344-872e-b8bdfa11b4fc-kube-api-access-wxvmg\") pod \"certified-operators-jkg24\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.584650 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:32 crc kubenswrapper[4810]: I1203 05:55:32.938340 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jkg24"] Dec 03 05:55:33 crc kubenswrapper[4810]: I1203 05:55:33.142346 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkg24" event={"ID":"c388cef9-08df-4344-872e-b8bdfa11b4fc","Type":"ContainerStarted","Data":"6026a9f055507942c5763be47eced5f2339177c8c5c6a197de5b242f65d14b43"} Dec 03 05:55:33 crc kubenswrapper[4810]: I1203 05:55:33.142639 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkg24" event={"ID":"c388cef9-08df-4344-872e-b8bdfa11b4fc","Type":"ContainerStarted","Data":"3b47ddbe7ba2b21e6cf89184dfbdd7f5aa4121c0ad8fdf6875bfea8f9a894b8b"} Dec 03 05:55:34 crc kubenswrapper[4810]: I1203 05:55:34.154518 4810 generic.go:334] "Generic (PLEG): container finished" podID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerID="6026a9f055507942c5763be47eced5f2339177c8c5c6a197de5b242f65d14b43" exitCode=0 Dec 03 05:55:34 crc kubenswrapper[4810]: I1203 05:55:34.154593 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkg24" event={"ID":"c388cef9-08df-4344-872e-b8bdfa11b4fc","Type":"ContainerDied","Data":"6026a9f055507942c5763be47eced5f2339177c8c5c6a197de5b242f65d14b43"} Dec 03 05:55:34 crc kubenswrapper[4810]: I1203 05:55:34.155041 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkg24" event={"ID":"c388cef9-08df-4344-872e-b8bdfa11b4fc","Type":"ContainerStarted","Data":"9a09973dd2777c753d151e1e4f717aee190c29ffcdf11fa7e9d1e1038795a811"} Dec 03 05:55:34 crc kubenswrapper[4810]: I1203 05:55:34.763899 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g"] Dec 03 05:55:34 crc kubenswrapper[4810]: I1203 05:55:34.764661 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" Dec 03 05:55:34 crc kubenswrapper[4810]: I1203 05:55:34.778604 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-pbczc" Dec 03 05:55:34 crc kubenswrapper[4810]: I1203 05:55:34.812895 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g"] Dec 03 05:55:34 crc kubenswrapper[4810]: I1203 05:55:34.954394 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5bkm\" (UniqueName: \"kubernetes.io/projected/ce9634a4-b14f-4972-a2f8-3bcea4db7a43-kube-api-access-k5bkm\") pod \"openstack-operator-controller-operator-7dd5c7bb7c-9ck7g\" (UID: \"ce9634a4-b14f-4972-a2f8-3bcea4db7a43\") " pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" Dec 03 05:55:35 crc kubenswrapper[4810]: I1203 05:55:35.055803 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5bkm\" (UniqueName: \"kubernetes.io/projected/ce9634a4-b14f-4972-a2f8-3bcea4db7a43-kube-api-access-k5bkm\") pod \"openstack-operator-controller-operator-7dd5c7bb7c-9ck7g\" (UID: \"ce9634a4-b14f-4972-a2f8-3bcea4db7a43\") " pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" Dec 03 05:55:35 crc kubenswrapper[4810]: I1203 05:55:35.088841 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5bkm\" (UniqueName: \"kubernetes.io/projected/ce9634a4-b14f-4972-a2f8-3bcea4db7a43-kube-api-access-k5bkm\") pod \"openstack-operator-controller-operator-7dd5c7bb7c-9ck7g\" (UID: \"ce9634a4-b14f-4972-a2f8-3bcea4db7a43\") " pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" Dec 03 05:55:35 crc kubenswrapper[4810]: I1203 05:55:35.161942 4810 generic.go:334] "Generic (PLEG): container finished" podID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerID="9a09973dd2777c753d151e1e4f717aee190c29ffcdf11fa7e9d1e1038795a811" exitCode=0 Dec 03 05:55:35 crc kubenswrapper[4810]: I1203 05:55:35.161999 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkg24" event={"ID":"c388cef9-08df-4344-872e-b8bdfa11b4fc","Type":"ContainerDied","Data":"9a09973dd2777c753d151e1e4f717aee190c29ffcdf11fa7e9d1e1038795a811"} Dec 03 05:55:35 crc kubenswrapper[4810]: I1203 05:55:35.380652 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" Dec 03 05:55:35 crc kubenswrapper[4810]: I1203 05:55:35.830535 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g"] Dec 03 05:55:35 crc kubenswrapper[4810]: W1203 05:55:35.843004 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce9634a4_b14f_4972_a2f8_3bcea4db7a43.slice/crio-f3647217eaf5bc49bf15f6989a5ff6abfa21b8fd526548acc6f803db9f65e5e0 WatchSource:0}: Error finding container f3647217eaf5bc49bf15f6989a5ff6abfa21b8fd526548acc6f803db9f65e5e0: Status 404 returned error can't find the container with id f3647217eaf5bc49bf15f6989a5ff6abfa21b8fd526548acc6f803db9f65e5e0 Dec 03 05:55:36 crc kubenswrapper[4810]: I1203 05:55:36.173491 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkg24" event={"ID":"c388cef9-08df-4344-872e-b8bdfa11b4fc","Type":"ContainerStarted","Data":"1fa26c87de0a7b5c325c573ad0f217dc2c1a0f0e0846e89e346b52612aa2ef0d"} Dec 03 05:55:36 crc kubenswrapper[4810]: I1203 05:55:36.174919 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" event={"ID":"ce9634a4-b14f-4972-a2f8-3bcea4db7a43","Type":"ContainerStarted","Data":"f3647217eaf5bc49bf15f6989a5ff6abfa21b8fd526548acc6f803db9f65e5e0"} Dec 03 05:55:36 crc kubenswrapper[4810]: I1203 05:55:36.198657 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jkg24" podStartSLOduration=1.810505806 podStartE2EDuration="4.198630605s" podCreationTimestamp="2025-12-03 05:55:32 +0000 UTC" firstStartedPulling="2025-12-03 05:55:33.144696556 +0000 UTC m=+857.080157397" lastFinishedPulling="2025-12-03 05:55:35.532821345 +0000 UTC m=+859.468282196" observedRunningTime="2025-12-03 05:55:36.192446612 +0000 UTC m=+860.127907453" watchObservedRunningTime="2025-12-03 05:55:36.198630605 +0000 UTC m=+860.134091446" Dec 03 05:55:42 crc kubenswrapper[4810]: I1203 05:55:42.227817 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" event={"ID":"ce9634a4-b14f-4972-a2f8-3bcea4db7a43","Type":"ContainerStarted","Data":"695536890d089256215b72f1b647290fe32bfaefba6a9bd1b67781dd7d29df0b"} Dec 03 05:55:42 crc kubenswrapper[4810]: I1203 05:55:42.228116 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" Dec 03 05:55:42 crc kubenswrapper[4810]: I1203 05:55:42.264747 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" podStartSLOduration=2.256252516 podStartE2EDuration="8.26471342s" podCreationTimestamp="2025-12-03 05:55:34 +0000 UTC" firstStartedPulling="2025-12-03 05:55:35.845079805 +0000 UTC m=+859.780540646" lastFinishedPulling="2025-12-03 05:55:41.853540699 +0000 UTC m=+865.789001550" observedRunningTime="2025-12-03 05:55:42.263663302 +0000 UTC m=+866.199124143" watchObservedRunningTime="2025-12-03 05:55:42.26471342 +0000 UTC m=+866.200174251" Dec 03 05:55:42 crc kubenswrapper[4810]: I1203 05:55:42.585011 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:42 crc kubenswrapper[4810]: I1203 05:55:42.585067 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:42 crc kubenswrapper[4810]: I1203 05:55:42.640204 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:43 crc kubenswrapper[4810]: I1203 05:55:43.304780 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:43 crc kubenswrapper[4810]: I1203 05:55:43.634707 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jkg24"] Dec 03 05:55:45 crc kubenswrapper[4810]: I1203 05:55:45.249220 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jkg24" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerName="registry-server" containerID="cri-o://1fa26c87de0a7b5c325c573ad0f217dc2c1a0f0e0846e89e346b52612aa2ef0d" gracePeriod=2 Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.257696 4810 generic.go:334] "Generic (PLEG): container finished" podID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerID="1fa26c87de0a7b5c325c573ad0f217dc2c1a0f0e0846e89e346b52612aa2ef0d" exitCode=0 Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.257777 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkg24" event={"ID":"c388cef9-08df-4344-872e-b8bdfa11b4fc","Type":"ContainerDied","Data":"1fa26c87de0a7b5c325c573ad0f217dc2c1a0f0e0846e89e346b52612aa2ef0d"} Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.690356 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.849818 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxvmg\" (UniqueName: \"kubernetes.io/projected/c388cef9-08df-4344-872e-b8bdfa11b4fc-kube-api-access-wxvmg\") pod \"c388cef9-08df-4344-872e-b8bdfa11b4fc\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.849983 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-utilities\") pod \"c388cef9-08df-4344-872e-b8bdfa11b4fc\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.850171 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-catalog-content\") pod \"c388cef9-08df-4344-872e-b8bdfa11b4fc\" (UID: \"c388cef9-08df-4344-872e-b8bdfa11b4fc\") " Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.853017 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-utilities" (OuterVolumeSpecName: "utilities") pod "c388cef9-08df-4344-872e-b8bdfa11b4fc" (UID: "c388cef9-08df-4344-872e-b8bdfa11b4fc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.855599 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c388cef9-08df-4344-872e-b8bdfa11b4fc-kube-api-access-wxvmg" (OuterVolumeSpecName: "kube-api-access-wxvmg") pod "c388cef9-08df-4344-872e-b8bdfa11b4fc" (UID: "c388cef9-08df-4344-872e-b8bdfa11b4fc"). InnerVolumeSpecName "kube-api-access-wxvmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.893712 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c388cef9-08df-4344-872e-b8bdfa11b4fc" (UID: "c388cef9-08df-4344-872e-b8bdfa11b4fc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.951798 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.951835 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxvmg\" (UniqueName: \"kubernetes.io/projected/c388cef9-08df-4344-872e-b8bdfa11b4fc-kube-api-access-wxvmg\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:46 crc kubenswrapper[4810]: I1203 05:55:46.951848 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c388cef9-08df-4344-872e-b8bdfa11b4fc-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:55:47 crc kubenswrapper[4810]: I1203 05:55:47.267354 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkg24" event={"ID":"c388cef9-08df-4344-872e-b8bdfa11b4fc","Type":"ContainerDied","Data":"3b47ddbe7ba2b21e6cf89184dfbdd7f5aa4121c0ad8fdf6875bfea8f9a894b8b"} Dec 03 05:55:47 crc kubenswrapper[4810]: I1203 05:55:47.267720 4810 scope.go:117] "RemoveContainer" containerID="1fa26c87de0a7b5c325c573ad0f217dc2c1a0f0e0846e89e346b52612aa2ef0d" Dec 03 05:55:47 crc kubenswrapper[4810]: I1203 05:55:47.267457 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jkg24" Dec 03 05:55:47 crc kubenswrapper[4810]: I1203 05:55:47.287037 4810 scope.go:117] "RemoveContainer" containerID="9a09973dd2777c753d151e1e4f717aee190c29ffcdf11fa7e9d1e1038795a811" Dec 03 05:55:47 crc kubenswrapper[4810]: I1203 05:55:47.306751 4810 scope.go:117] "RemoveContainer" containerID="6026a9f055507942c5763be47eced5f2339177c8c5c6a197de5b242f65d14b43" Dec 03 05:55:47 crc kubenswrapper[4810]: I1203 05:55:47.327298 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jkg24"] Dec 03 05:55:47 crc kubenswrapper[4810]: I1203 05:55:47.331066 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jkg24"] Dec 03 05:55:48 crc kubenswrapper[4810]: I1203 05:55:48.411864 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" path="/var/lib/kubelet/pods/c388cef9-08df-4344-872e-b8bdfa11b4fc/volumes" Dec 03 05:55:55 crc kubenswrapper[4810]: I1203 05:55:55.384971 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-9ck7g" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.674493 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv"] Dec 03 05:56:14 crc kubenswrapper[4810]: E1203 05:56:14.675459 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerName="extract-content" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.675478 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerName="extract-content" Dec 03 05:56:14 crc kubenswrapper[4810]: E1203 05:56:14.675489 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerName="registry-server" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.675495 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerName="registry-server" Dec 03 05:56:14 crc kubenswrapper[4810]: E1203 05:56:14.675506 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerName="extract-utilities" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.675514 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerName="extract-utilities" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.675664 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="c388cef9-08df-4344-872e-b8bdfa11b4fc" containerName="registry-server" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.676482 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.678607 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-vxbbw" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.688019 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.689176 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.692051 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-7lhsm" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.695574 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.696882 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.708526 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.709845 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.718804 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.719961 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.720065 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.725784 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.758945 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-htvp8" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.759312 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-brthm" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.765946 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-z7xrj" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.775798 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.786117 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.807810 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.819802 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.820966 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.823776 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqc7c\" (UniqueName: \"kubernetes.io/projected/af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d-kube-api-access-mqc7c\") pod \"glance-operator-controller-manager-77987cd8cd-h9f8m\" (UID: \"af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.823827 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q49f\" (UniqueName: \"kubernetes.io/projected/93016dff-dd26-4447-bb03-244d51ba4154-kube-api-access-5q49f\") pod \"barbican-operator-controller-manager-7d9dfd778-k8whl\" (UID: \"93016dff-dd26-4447-bb03-244d51ba4154\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.823896 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc95j\" (UniqueName: \"kubernetes.io/projected/101772d6-6540-4695-a13f-ab0ce9a4bff2-kube-api-access-pc95j\") pod \"heat-operator-controller-manager-5f64f6f8bb-64665\" (UID: \"101772d6-6540-4695-a13f-ab0ce9a4bff2\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.823952 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrl9m\" (UniqueName: \"kubernetes.io/projected/4b9517ac-6af4-40eb-a049-7b778dcc5f10-kube-api-access-qrl9m\") pod \"cinder-operator-controller-manager-859b6ccc6-wbdfg\" (UID: \"4b9517ac-6af4-40eb-a049-7b778dcc5f10\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.823985 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfbrt\" (UniqueName: \"kubernetes.io/projected/7cf67e34-abd6-4424-95f4-7654ac840108-kube-api-access-sfbrt\") pod \"designate-operator-controller-manager-78b4bc895b-mpgcv\" (UID: \"7cf67e34-abd6-4424-95f4-7654ac840108\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.834715 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-fdct7"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.835719 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.837777 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-w668q" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.858648 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-w2cxx" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.858823 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.866809 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-fdct7"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.870516 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.891050 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.892033 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.905437 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.906993 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-lkrd8" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.908720 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.926381 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-2wl7t" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.927030 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrl9m\" (UniqueName: \"kubernetes.io/projected/4b9517ac-6af4-40eb-a049-7b778dcc5f10-kube-api-access-qrl9m\") pod \"cinder-operator-controller-manager-859b6ccc6-wbdfg\" (UID: \"4b9517ac-6af4-40eb-a049-7b778dcc5f10\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.927179 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfbrt\" (UniqueName: \"kubernetes.io/projected/7cf67e34-abd6-4424-95f4-7654ac840108-kube-api-access-sfbrt\") pod \"designate-operator-controller-manager-78b4bc895b-mpgcv\" (UID: \"7cf67e34-abd6-4424-95f4-7654ac840108\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.927326 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqc7c\" (UniqueName: \"kubernetes.io/projected/af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d-kube-api-access-mqc7c\") pod \"glance-operator-controller-manager-77987cd8cd-h9f8m\" (UID: \"af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.928041 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q49f\" (UniqueName: \"kubernetes.io/projected/93016dff-dd26-4447-bb03-244d51ba4154-kube-api-access-5q49f\") pod \"barbican-operator-controller-manager-7d9dfd778-k8whl\" (UID: \"93016dff-dd26-4447-bb03-244d51ba4154\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.928248 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzrq5\" (UniqueName: \"kubernetes.io/projected/7563ba12-e36d-48b2-8d43-57435fe85d0e-kube-api-access-jzrq5\") pod \"horizon-operator-controller-manager-68c6d99b8f-zclnr\" (UID: \"7563ba12-e36d-48b2-8d43-57435fe85d0e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.940478 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc95j\" (UniqueName: \"kubernetes.io/projected/101772d6-6540-4695-a13f-ab0ce9a4bff2-kube-api-access-pc95j\") pod \"heat-operator-controller-manager-5f64f6f8bb-64665\" (UID: \"101772d6-6540-4695-a13f-ab0ce9a4bff2\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.941850 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.960556 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l"] Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.970023 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q49f\" (UniqueName: \"kubernetes.io/projected/93016dff-dd26-4447-bb03-244d51ba4154-kube-api-access-5q49f\") pod \"barbican-operator-controller-manager-7d9dfd778-k8whl\" (UID: \"93016dff-dd26-4447-bb03-244d51ba4154\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.970986 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfbrt\" (UniqueName: \"kubernetes.io/projected/7cf67e34-abd6-4424-95f4-7654ac840108-kube-api-access-sfbrt\") pod \"designate-operator-controller-manager-78b4bc895b-mpgcv\" (UID: \"7cf67e34-abd6-4424-95f4-7654ac840108\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.973394 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc95j\" (UniqueName: \"kubernetes.io/projected/101772d6-6540-4695-a13f-ab0ce9a4bff2-kube-api-access-pc95j\") pod \"heat-operator-controller-manager-5f64f6f8bb-64665\" (UID: \"101772d6-6540-4695-a13f-ab0ce9a4bff2\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.986439 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqc7c\" (UniqueName: \"kubernetes.io/projected/af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d-kube-api-access-mqc7c\") pod \"glance-operator-controller-manager-77987cd8cd-h9f8m\" (UID: \"af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.991437 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrl9m\" (UniqueName: \"kubernetes.io/projected/4b9517ac-6af4-40eb-a049-7b778dcc5f10-kube-api-access-qrl9m\") pod \"cinder-operator-controller-manager-859b6ccc6-wbdfg\" (UID: \"4b9517ac-6af4-40eb-a049-7b778dcc5f10\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" Dec 03 05:56:14 crc kubenswrapper[4810]: I1203 05:56:14.997537 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.006794 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.010624 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.011949 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.013436 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-rprnj" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.039003 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.047294 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwhlt\" (UniqueName: \"kubernetes.io/projected/b36e1f29-d534-4c72-bcac-74ffc356c086-kube-api-access-jwhlt\") pod \"keystone-operator-controller-manager-7765d96ddf-b9hqh\" (UID: \"b36e1f29-d534-4c72-bcac-74ffc356c086\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.047337 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vc52\" (UniqueName: \"kubernetes.io/projected/3de263f5-25e9-41a0-a51d-37317cb65b16-kube-api-access-5vc52\") pod \"ironic-operator-controller-manager-6c548fd776-pds6l\" (UID: \"3de263f5-25e9-41a0-a51d-37317cb65b16\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.047380 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzrq5\" (UniqueName: \"kubernetes.io/projected/7563ba12-e36d-48b2-8d43-57435fe85d0e-kube-api-access-jzrq5\") pod \"horizon-operator-controller-manager-68c6d99b8f-zclnr\" (UID: \"7563ba12-e36d-48b2-8d43-57435fe85d0e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.047410 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krr28\" (UniqueName: \"kubernetes.io/projected/4517c669-2df4-40be-bcc1-0b44fa11838d-kube-api-access-krr28\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.047476 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.047495 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h7x2\" (UniqueName: \"kubernetes.io/projected/60612556-0f2a-4999-afb7-d71b32d18ef0-kube-api-access-2h7x2\") pod \"manila-operator-controller-manager-7c79b5df47-pzmcl\" (UID: \"60612556-0f2a-4999-afb7-d71b32d18ef0\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.056419 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.059596 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.067208 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.078629 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.079749 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.091324 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-fpsqf" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.095750 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.119102 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzrq5\" (UniqueName: \"kubernetes.io/projected/7563ba12-e36d-48b2-8d43-57435fe85d0e-kube-api-access-jzrq5\") pod \"horizon-operator-controller-manager-68c6d99b8f-zclnr\" (UID: \"7563ba12-e36d-48b2-8d43-57435fe85d0e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.122205 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.138312 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-knn5d" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.153040 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.153097 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h7x2\" (UniqueName: \"kubernetes.io/projected/60612556-0f2a-4999-afb7-d71b32d18ef0-kube-api-access-2h7x2\") pod \"manila-operator-controller-manager-7c79b5df47-pzmcl\" (UID: \"60612556-0f2a-4999-afb7-d71b32d18ef0\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.153123 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwhlt\" (UniqueName: \"kubernetes.io/projected/b36e1f29-d534-4c72-bcac-74ffc356c086-kube-api-access-jwhlt\") pod \"keystone-operator-controller-manager-7765d96ddf-b9hqh\" (UID: \"b36e1f29-d534-4c72-bcac-74ffc356c086\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.153150 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vc52\" (UniqueName: \"kubernetes.io/projected/3de263f5-25e9-41a0-a51d-37317cb65b16-kube-api-access-5vc52\") pod \"ironic-operator-controller-manager-6c548fd776-pds6l\" (UID: \"3de263f5-25e9-41a0-a51d-37317cb65b16\") " 
pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.153191 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nhsx\" (UniqueName: \"kubernetes.io/projected/68f5962b-45be-45a4-9822-eb23088d3d79-kube-api-access-7nhsx\") pod \"mariadb-operator-controller-manager-56bbcc9d85-km59q\" (UID: \"68f5962b-45be-45a4-9822-eb23088d3d79\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.153218 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdt8h\" (UniqueName: \"kubernetes.io/projected/ac2ef7b5-018c-4775-8e14-106265e1c300-kube-api-access-mdt8h\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-vs4m6\" (UID: \"ac2ef7b5-018c-4775-8e14-106265e1c300\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.153245 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krr28\" (UniqueName: \"kubernetes.io/projected/4517c669-2df4-40be-bcc1-0b44fa11838d-kube-api-access-krr28\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:15 crc kubenswrapper[4810]: E1203 05:56:15.153690 4810 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:15 crc kubenswrapper[4810]: E1203 05:56:15.153761 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert podName:4517c669-2df4-40be-bcc1-0b44fa11838d nodeName:}" failed. No retries permitted until 2025-12-03 05:56:15.653728285 +0000 UTC m=+899.589189116 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert") pod "infra-operator-controller-manager-57548d458d-fdct7" (UID: "4517c669-2df4-40be-bcc1-0b44fa11838d") : secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.159418 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.222458 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krr28\" (UniqueName: \"kubernetes.io/projected/4517c669-2df4-40be-bcc1-0b44fa11838d-kube-api-access-krr28\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.223847 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h7x2\" (UniqueName: \"kubernetes.io/projected/60612556-0f2a-4999-afb7-d71b32d18ef0-kube-api-access-2h7x2\") pod \"manila-operator-controller-manager-7c79b5df47-pzmcl\" (UID: \"60612556-0f2a-4999-afb7-d71b32d18ef0\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.226795 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vc52\" (UniqueName: \"kubernetes.io/projected/3de263f5-25e9-41a0-a51d-37317cb65b16-kube-api-access-5vc52\") pod \"ironic-operator-controller-manager-6c548fd776-pds6l\" (UID: \"3de263f5-25e9-41a0-a51d-37317cb65b16\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.257701 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwhlt\" (UniqueName: \"kubernetes.io/projected/b36e1f29-d534-4c72-bcac-74ffc356c086-kube-api-access-jwhlt\") pod \"keystone-operator-controller-manager-7765d96ddf-b9hqh\" (UID: \"b36e1f29-d534-4c72-bcac-74ffc356c086\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.258461 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.269577 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nhsx\" (UniqueName: \"kubernetes.io/projected/68f5962b-45be-45a4-9822-eb23088d3d79-kube-api-access-7nhsx\") pod \"mariadb-operator-controller-manager-56bbcc9d85-km59q\" (UID: \"68f5962b-45be-45a4-9822-eb23088d3d79\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.269645 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdt8h\" (UniqueName: \"kubernetes.io/projected/ac2ef7b5-018c-4775-8e14-106265e1c300-kube-api-access-mdt8h\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-vs4m6\" (UID: \"ac2ef7b5-018c-4775-8e14-106265e1c300\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.271308 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.296336 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.320218 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.321528 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.324120 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.334359 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.345468 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.346979 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-vblnz" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.370563 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-msbz4" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.372490 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgdsm\" (UniqueName: \"kubernetes.io/projected/fa52e238-d025-4845-85bb-2787a7eb2ed7-kube-api-access-pgdsm\") pod \"nova-operator-controller-manager-697bc559fc-5lkc2\" (UID: \"fa52e238-d025-4845-85bb-2787a7eb2ed7\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.372577 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.372701 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbdmd\" (UniqueName: \"kubernetes.io/projected/660ec0b8-77cd-4cb2-9597-abca0770fbf9-kube-api-access-zbdmd\") pod \"octavia-operator-controller-manager-998648c74-z6tqk\" (UID: \"660ec0b8-77cd-4cb2-9597-abca0770fbf9\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.375325 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdt8h\" (UniqueName: \"kubernetes.io/projected/ac2ef7b5-018c-4775-8e14-106265e1c300-kube-api-access-mdt8h\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-vs4m6\" (UID: \"ac2ef7b5-018c-4775-8e14-106265e1c300\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.376688 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nhsx\" (UniqueName: \"kubernetes.io/projected/68f5962b-45be-45a4-9822-eb23088d3d79-kube-api-access-7nhsx\") pod \"mariadb-operator-controller-manager-56bbcc9d85-km59q\" (UID: \"68f5962b-45be-45a4-9822-eb23088d3d79\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.379164 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.387232 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.387678 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-hdr8r" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.396814 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.437318 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.461833 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.463363 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.479651 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zlrw\" (UniqueName: \"kubernetes.io/projected/7306b11b-b539-4542-af3f-a738880af67f-kube-api-access-6zlrw\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.479744 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.479811 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbdmd\" (UniqueName: \"kubernetes.io/projected/660ec0b8-77cd-4cb2-9597-abca0770fbf9-kube-api-access-zbdmd\") pod \"octavia-operator-controller-manager-998648c74-z6tqk\" (UID: \"660ec0b8-77cd-4cb2-9597-abca0770fbf9\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.479934 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgdsm\" (UniqueName: \"kubernetes.io/projected/fa52e238-d025-4845-85bb-2787a7eb2ed7-kube-api-access-pgdsm\") pod \"nova-operator-controller-manager-697bc559fc-5lkc2\" (UID: \"fa52e238-d025-4845-85bb-2787a7eb2ed7\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.482795 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.484136 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.484614 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.489165 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-bzgnv" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.490204 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.491546 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.510299 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-g9t57" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.517320 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.519813 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.520282 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-274cd" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.525967 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.527044 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.529855 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.531512 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgdsm\" (UniqueName: \"kubernetes.io/projected/fa52e238-d025-4845-85bb-2787a7eb2ed7-kube-api-access-pgdsm\") pod \"nova-operator-controller-manager-697bc559fc-5lkc2\" (UID: \"fa52e238-d025-4845-85bb-2787a7eb2ed7\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.547682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbdmd\" (UniqueName: \"kubernetes.io/projected/660ec0b8-77cd-4cb2-9597-abca0770fbf9-kube-api-access-zbdmd\") pod \"octavia-operator-controller-manager-998648c74-z6tqk\" (UID: \"660ec0b8-77cd-4cb2-9597-abca0770fbf9\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.548378 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-ppnfg" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.557319 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.566158 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.569666 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.573981 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-md8gl"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.575271 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.580626 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-l42b5" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.591202 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s57bh\" (UniqueName: \"kubernetes.io/projected/13befddb-d7f2-48bb-9d8c-8e61fbd8601a-kube-api-access-s57bh\") pod \"telemetry-operator-controller-manager-76cc84c6bb-2ktb9\" (UID: \"13befddb-d7f2-48bb-9d8c-8e61fbd8601a\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.591279 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zlrw\" (UniqueName: \"kubernetes.io/projected/7306b11b-b539-4542-af3f-a738880af67f-kube-api-access-6zlrw\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.591329 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.591385 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jw5z\" (UniqueName: \"kubernetes.io/projected/f8e032e8-8552-4d00-861c-798b4e59b83e-kube-api-access-2jw5z\") pod \"swift-operator-controller-manager-5f8c65bbfc-r42sr\" (UID: \"f8e032e8-8552-4d00-861c-798b4e59b83e\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.591412 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlnhm\" (UniqueName: \"kubernetes.io/projected/70b45fee-d617-41b2-a598-eae2815e19c6-kube-api-access-jlnhm\") pod \"ovn-operator-controller-manager-b6456fdb6-lvc9h\" (UID: \"70b45fee-d617-41b2-a598-eae2815e19c6\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.591440 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whr5x\" (UniqueName: \"kubernetes.io/projected/4a806c3b-f888-4612-b979-9f57fa2adabe-kube-api-access-whr5x\") pod \"placement-operator-controller-manager-78f8948974-2hkc4\" (UID: \"4a806c3b-f888-4612-b979-9f57fa2adabe\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" Dec 03 05:56:15 crc kubenswrapper[4810]: E1203 05:56:15.591881 4810 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:15 crc kubenswrapper[4810]: E1203 05:56:15.591926 4810 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert podName:7306b11b-b539-4542-af3f-a738880af67f nodeName:}" failed. No retries permitted until 2025-12-03 05:56:16.091910818 +0000 UTC m=+900.027371659 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686v7m42" (UID: "7306b11b-b539-4542-af3f-a738880af67f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.598303 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.599892 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-md8gl"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.615383 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.616590 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.619921 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-lsf9h" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.646795 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.681912 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zlrw\" (UniqueName: \"kubernetes.io/projected/7306b11b-b539-4542-af3f-a738880af67f-kube-api-access-6zlrw\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:15 crc kubenswrapper[4810]: E1203 05:56:15.699100 4810 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:15 crc kubenswrapper[4810]: E1203 05:56:15.699249 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert podName:4517c669-2df4-40be-bcc1-0b44fa11838d nodeName:}" failed. No retries permitted until 2025-12-03 05:56:16.699176069 +0000 UTC m=+900.634636910 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert") pod "infra-operator-controller-manager-57548d458d-fdct7" (UID: "4517c669-2df4-40be-bcc1-0b44fa11838d") : secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.693578 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.703406 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jw5z\" (UniqueName: \"kubernetes.io/projected/f8e032e8-8552-4d00-861c-798b4e59b83e-kube-api-access-2jw5z\") pod \"swift-operator-controller-manager-5f8c65bbfc-r42sr\" (UID: \"f8e032e8-8552-4d00-861c-798b4e59b83e\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.703459 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlnhm\" (UniqueName: \"kubernetes.io/projected/70b45fee-d617-41b2-a598-eae2815e19c6-kube-api-access-jlnhm\") pod \"ovn-operator-controller-manager-b6456fdb6-lvc9h\" (UID: \"70b45fee-d617-41b2-a598-eae2815e19c6\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.703508 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whr5x\" (UniqueName: \"kubernetes.io/projected/4a806c3b-f888-4612-b979-9f57fa2adabe-kube-api-access-whr5x\") pod \"placement-operator-controller-manager-78f8948974-2hkc4\" (UID: \"4a806c3b-f888-4612-b979-9f57fa2adabe\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.703547 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bqng\" (UniqueName: \"kubernetes.io/projected/799ba5c1-1eae-4a8a-9177-454e5bcba2a5-kube-api-access-4bqng\") pod \"test-operator-controller-manager-5854674fcc-md8gl\" (UID: \"799ba5c1-1eae-4a8a-9177-454e5bcba2a5\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.703599 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p896p\" (UniqueName: \"kubernetes.io/projected/5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462-kube-api-access-p896p\") pod \"watcher-operator-controller-manager-769dc69bc-hw6bt\" (UID: \"5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.703766 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s57bh\" (UniqueName: \"kubernetes.io/projected/13befddb-d7f2-48bb-9d8c-8e61fbd8601a-kube-api-access-s57bh\") pod \"telemetry-operator-controller-manager-76cc84c6bb-2ktb9\" (UID: \"13befddb-d7f2-48bb-9d8c-8e61fbd8601a\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 
05:56:15.754340 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.763215 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s57bh\" (UniqueName: \"kubernetes.io/projected/13befddb-d7f2-48bb-9d8c-8e61fbd8601a-kube-api-access-s57bh\") pod \"telemetry-operator-controller-manager-76cc84c6bb-2ktb9\" (UID: \"13befddb-d7f2-48bb-9d8c-8e61fbd8601a\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.768914 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jw5z\" (UniqueName: \"kubernetes.io/projected/f8e032e8-8552-4d00-861c-798b4e59b83e-kube-api-access-2jw5z\") pod \"swift-operator-controller-manager-5f8c65bbfc-r42sr\" (UID: \"f8e032e8-8552-4d00-861c-798b4e59b83e\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.770306 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whr5x\" (UniqueName: \"kubernetes.io/projected/4a806c3b-f888-4612-b979-9f57fa2adabe-kube-api-access-whr5x\") pod \"placement-operator-controller-manager-78f8948974-2hkc4\" (UID: \"4a806c3b-f888-4612-b979-9f57fa2adabe\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.779156 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.796667 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlnhm\" (UniqueName: \"kubernetes.io/projected/70b45fee-d617-41b2-a598-eae2815e19c6-kube-api-access-jlnhm\") pod \"ovn-operator-controller-manager-b6456fdb6-lvc9h\" (UID: \"70b45fee-d617-41b2-a598-eae2815e19c6\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.813755 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bqng\" (UniqueName: \"kubernetes.io/projected/799ba5c1-1eae-4a8a-9177-454e5bcba2a5-kube-api-access-4bqng\") pod \"test-operator-controller-manager-5854674fcc-md8gl\" (UID: \"799ba5c1-1eae-4a8a-9177-454e5bcba2a5\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.813825 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p896p\" (UniqueName: \"kubernetes.io/projected/5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462-kube-api-access-p896p\") pod \"watcher-operator-controller-manager-769dc69bc-hw6bt\" (UID: \"5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.840462 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p896p\" (UniqueName: \"kubernetes.io/projected/5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462-kube-api-access-p896p\") pod \"watcher-operator-controller-manager-769dc69bc-hw6bt\" (UID: \"5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462\") " 
pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.847128 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.852863 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.853312 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.855434 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-bccf2" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.856454 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.856814 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.859862 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.860965 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bqng\" (UniqueName: \"kubernetes.io/projected/799ba5c1-1eae-4a8a-9177-454e5bcba2a5-kube-api-access-4bqng\") pod \"test-operator-controller-manager-5854674fcc-md8gl\" (UID: \"799ba5c1-1eae-4a8a-9177-454e5bcba2a5\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.899071 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.907127 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.918932 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7lr6\" (UniqueName: \"kubernetes.io/projected/a67e7123-c5dc-4392-9296-02892458e969-kube-api-access-w7lr6\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.918975 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.919028 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.927442 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.930862 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.932966 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.938416 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-fr4vc" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.943477 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d"] Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.970557 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" Dec 03 05:56:15 crc kubenswrapper[4810]: I1203 05:56:15.976675 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.032919 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47tcs\" (UniqueName: \"kubernetes.io/projected/2f985dd7-de9f-498f-a297-f0602a4888a4-kube-api-access-47tcs\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7nw2d\" (UID: \"2f985dd7-de9f-498f-a297-f0602a4888a4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.033008 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7lr6\" (UniqueName: \"kubernetes.io/projected/a67e7123-c5dc-4392-9296-02892458e969-kube-api-access-w7lr6\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.033030 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.033078 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.033212 4810 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.033247 4810 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.033274 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:16.533254425 +0000 UTC m=+900.468715266 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "webhook-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.033344 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:16.533315756 +0000 UTC m=+900.468776787 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "metrics-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.059622 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7lr6\" (UniqueName: \"kubernetes.io/projected/a67e7123-c5dc-4392-9296-02892458e969-kube-api-access-w7lr6\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.138645 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47tcs\" (UniqueName: \"kubernetes.io/projected/2f985dd7-de9f-498f-a297-f0602a4888a4-kube-api-access-47tcs\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7nw2d\" (UID: \"2f985dd7-de9f-498f-a297-f0602a4888a4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.139068 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.139227 4810 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.139287 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert podName:7306b11b-b539-4542-af3f-a738880af67f nodeName:}" failed. No retries permitted until 2025-12-03 05:56:17.139268962 +0000 UTC m=+901.074729803 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686v7m42" (UID: "7306b11b-b539-4542-af3f-a738880af67f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.171867 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47tcs\" (UniqueName: \"kubernetes.io/projected/2f985dd7-de9f-498f-a297-f0602a4888a4-kube-api-access-47tcs\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7nw2d\" (UID: \"2f985dd7-de9f-498f-a297-f0602a4888a4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.234633 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.547550 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.548161 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.547894 4810 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.548553 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:17.548528573 +0000 UTC m=+901.483989414 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "metrics-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.548431 4810 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.548831 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:17.54880343 +0000 UTC m=+901.484264271 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "webhook-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: I1203 05:56:16.753245 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.753551 4810 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:16 crc kubenswrapper[4810]: E1203 05:56:16.753656 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert podName:4517c669-2df4-40be-bcc1-0b44fa11838d nodeName:}" failed. No retries permitted until 2025-12-03 05:56:18.753628955 +0000 UTC m=+902.689090006 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert") pod "infra-operator-controller-manager-57548d458d-fdct7" (UID: "4517c669-2df4-40be-bcc1-0b44fa11838d") : secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:17 crc kubenswrapper[4810]: W1203 05:56:17.115484 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf3ca9ee_5379_49c7_9dd4_c4ab0b1c9d5d.slice/crio-a4a38f4257fec2856ea2a0042a2fc620ffb4415152a4c417153421c8cfca434a WatchSource:0}: Error finding container a4a38f4257fec2856ea2a0042a2fc620ffb4415152a4c417153421c8cfca434a: Status 404 returned error can't find the container with id a4a38f4257fec2856ea2a0042a2fc620ffb4415152a4c417153421c8cfca434a Dec 03 05:56:17 crc kubenswrapper[4810]: W1203 05:56:17.117074 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cf67e34_abd6_4424_95f4_7654ac840108.slice/crio-83fa210a7c642ae6bd4afc47e8affc2a59ad43a019d4d76560bb9e9a7122ca32 WatchSource:0}: Error finding container 83fa210a7c642ae6bd4afc47e8affc2a59ad43a019d4d76560bb9e9a7122ca32: Status 404 returned error can't find the container with id 83fa210a7c642ae6bd4afc47e8affc2a59ad43a019d4d76560bb9e9a7122ca32 Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.132045 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.140643 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.166371 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 
05:56:17.166587 4810 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.166684 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert podName:7306b11b-b539-4542-af3f-a738880af67f nodeName:}" failed. No retries permitted until 2025-12-03 05:56:19.166659704 +0000 UTC m=+903.102120545 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686v7m42" (UID: "7306b11b-b539-4542-af3f-a738880af67f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.509935 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" event={"ID":"7cf67e34-abd6-4424-95f4-7654ac840108","Type":"ContainerStarted","Data":"83fa210a7c642ae6bd4afc47e8affc2a59ad43a019d4d76560bb9e9a7122ca32"} Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.515635 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" event={"ID":"af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d","Type":"ContainerStarted","Data":"a4a38f4257fec2856ea2a0042a2fc620ffb4415152a4c417153421c8cfca434a"} Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.554103 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt"] Dec 03 05:56:17 crc kubenswrapper[4810]: W1203 05:56:17.580098 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93016dff_dd26_4447_bb03_244d51ba4154.slice/crio-73764ac4991c320a437f77c2b9315daf1fc7a434282e7d391032cbe594de94ae WatchSource:0}: Error finding container 73764ac4991c320a437f77c2b9315daf1fc7a434282e7d391032cbe594de94ae: Status 404 returned error can't find the container with id 73764ac4991c320a437f77c2b9315daf1fc7a434282e7d391032cbe594de94ae Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.589657 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.592264 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.592332 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.592548 4810 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret 
"webhook-server-cert" not found Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.592612 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:19.592594363 +0000 UTC m=+903.528055204 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "webhook-server-cert" not found Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.592680 4810 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.592706 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:19.592699256 +0000 UTC m=+903.528160097 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "metrics-server-cert" not found Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.619493 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.637968 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.654504 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.665825 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.674070 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q"] Dec 03 05:56:17 crc kubenswrapper[4810]: W1203 05:56:17.694228 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac2ef7b5_018c_4775_8e14_106265e1c300.slice/crio-ad65b73fb0c93cf1cd50a4c9cfb586b10e7b24bfdcb23b7b33a2e2b2c1112683 WatchSource:0}: Error finding container ad65b73fb0c93cf1cd50a4c9cfb586b10e7b24bfdcb23b7b33a2e2b2c1112683: Status 404 returned error can't find the container with id ad65b73fb0c93cf1cd50a4c9cfb586b10e7b24bfdcb23b7b33a2e2b2c1112683 Dec 03 05:56:17 crc kubenswrapper[4810]: W1203 05:56:17.695156 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7563ba12_e36d_48b2_8d43_57435fe85d0e.slice/crio-5f84f01ef8ad5662028783c472af2d86ebfacad854cd71965cf7d34cff1051b5 WatchSource:0}: Error finding container 5f84f01ef8ad5662028783c472af2d86ebfacad854cd71965cf7d34cff1051b5: Status 404 returned error can't find the container with id 
5f84f01ef8ad5662028783c472af2d86ebfacad854cd71965cf7d34cff1051b5 Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.703994 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.719613 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.734216 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665"] Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.737673 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2h7x2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-pzmcl_openstack-operators(60612556-0f2a-4999-afb7-d71b32d18ef0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.737877 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect 
--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zbdmd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-z6tqk_openstack-operators(660ec0b8-77cd-4cb2-9597-abca0770fbf9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.738854 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl"] Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.744798 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2h7x2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-pzmcl_openstack-operators(60612556-0f2a-4999-afb7-d71b32d18ef0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.746003 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" podUID="60612556-0f2a-4999-afb7-d71b32d18ef0" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.750405 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zbdmd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-z6tqk_openstack-operators(660ec0b8-77cd-4cb2-9597-abca0770fbf9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.751921 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" 
pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" podUID="660ec0b8-77cd-4cb2-9597-abca0770fbf9" Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.752362 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.758042 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.761829 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.767682 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-md8gl"] Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.793795 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr"] Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.796465 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pc95j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
heat-operator-controller-manager-5f64f6f8bb-64665_openstack-operators(101772d6-6540-4695-a13f-ab0ce9a4bff2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.796876 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-47tcs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-7nw2d_openstack-operators(2f985dd7-de9f-498f-a297-f0602a4888a4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.798035 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" podUID="2f985dd7-de9f-498f-a297-f0602a4888a4" Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.804233 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9"] Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.805393 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-whr5x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-2hkc4_openstack-operators(4a806c3b-f888-4612-b979-9f57fa2adabe): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.805597 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2jw5z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-r42sr_openstack-operators(f8e032e8-8552-4d00-861c-798b4e59b83e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.806347 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pc95j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-64665_openstack-operators(101772d6-6540-4695-a13f-ab0ce9a4bff2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.807941 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" podUID="101772d6-6540-4695-a13f-ab0ce9a4bff2" Dec 03 05:56:17 crc kubenswrapper[4810]: I1203 05:56:17.814032 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4"] Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.817211 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s57bh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-2ktb9_openstack-operators(13befddb-d7f2-48bb-9d8c-8e61fbd8601a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.817341 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4bqng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-md8gl_openstack-operators(799ba5c1-1eae-4a8a-9177-454e5bcba2a5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.818106 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2jw5z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-r42sr_openstack-operators(f8e032e8-8552-4d00-861c-798b4e59b83e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.819310 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull 
QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" podUID="f8e032e8-8552-4d00-861c-798b4e59b83e" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.820049 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4bqng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-md8gl_openstack-operators(799ba5c1-1eae-4a8a-9177-454e5bcba2a5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.822033 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" podUID="799ba5c1-1eae-4a8a-9177-454e5bcba2a5" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.823656 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s57bh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-2ktb9_openstack-operators(13befddb-d7f2-48bb-9d8c-8e61fbd8601a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 05:56:17 crc kubenswrapper[4810]: E1203 05:56:17.824920 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" podUID="13befddb-d7f2-48bb-9d8c-8e61fbd8601a" Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.544896 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" event={"ID":"fa52e238-d025-4845-85bb-2787a7eb2ed7","Type":"ContainerStarted","Data":"2dfc08a6a6087f7741da464554fef7d38c3b1d43a7c65bd8e52fb5c14d8d6b2a"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.547462 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" event={"ID":"101772d6-6540-4695-a13f-ab0ce9a4bff2","Type":"ContainerStarted","Data":"d4df8eeb8373be34d3abcd9a79282934082ebce5b2ed961155b93c7f1d3f185b"} Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.550638 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" podUID="101772d6-6540-4695-a13f-ab0ce9a4bff2" Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.552449 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" event={"ID":"660ec0b8-77cd-4cb2-9597-abca0770fbf9","Type":"ContainerStarted","Data":"6e9e6abedced9ba86568f8a121ab39c8f9d70919753ecebd5cd2c10cc0d2d92e"} Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.554400 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for 
\"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" podUID="660ec0b8-77cd-4cb2-9597-abca0770fbf9" Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.560802 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" event={"ID":"3de263f5-25e9-41a0-a51d-37317cb65b16","Type":"ContainerStarted","Data":"21256a8cba5482b224ddbe289590637da4160d0b2028c88d508802166913e0b7"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.570177 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" event={"ID":"f8e032e8-8552-4d00-861c-798b4e59b83e","Type":"ContainerStarted","Data":"10971f4f07e949f2e872c7c826fe5fe85617927f912f143c077b3236b91e5496"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.574218 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" event={"ID":"70b45fee-d617-41b2-a598-eae2815e19c6","Type":"ContainerStarted","Data":"438ee6c467d8722e80e9748cb2640a3189cda916aa0064efe9f57215a0042810"} Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.575014 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" podUID="f8e032e8-8552-4d00-861c-798b4e59b83e" Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.581225 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" event={"ID":"ac2ef7b5-018c-4775-8e14-106265e1c300","Type":"ContainerStarted","Data":"ad65b73fb0c93cf1cd50a4c9cfb586b10e7b24bfdcb23b7b33a2e2b2c1112683"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.585874 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" event={"ID":"799ba5c1-1eae-4a8a-9177-454e5bcba2a5","Type":"ContainerStarted","Data":"a7894d67f635cf3ee9564daed49bafc26214cacb14cca44093c5bdf2a661f1a7"} Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.590950 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" podUID="799ba5c1-1eae-4a8a-9177-454e5bcba2a5" Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.614608 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" 
event={"ID":"5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462","Type":"ContainerStarted","Data":"e63f0a56b45882656bceabe09e180480399d10fa8f82d8830d84f8e559e0f338"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.626404 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" event={"ID":"68f5962b-45be-45a4-9822-eb23088d3d79","Type":"ContainerStarted","Data":"24a27b37d97ca8d0a35dbdd77e1787a90d1f80d46dea71b63139a6cd7f99ae4e"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.630671 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" event={"ID":"4b9517ac-6af4-40eb-a049-7b778dcc5f10","Type":"ContainerStarted","Data":"d6d75eb6041778e71683676c17b463109d5ddcf478f21433e34cab74442d5410"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.637459 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" event={"ID":"b36e1f29-d534-4c72-bcac-74ffc356c086","Type":"ContainerStarted","Data":"ba453eb94c3c25913a1772a26b0f3d01e88003032015df91085b45d47f787110"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.642344 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" event={"ID":"13befddb-d7f2-48bb-9d8c-8e61fbd8601a","Type":"ContainerStarted","Data":"9c2e65cbc5ac7b9245cd3a38dc7bdac5ffd39578fdcd1ad32970ed1dfa029296"} Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.646894 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" podUID="13befddb-d7f2-48bb-9d8c-8e61fbd8601a" Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.651267 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" event={"ID":"4a806c3b-f888-4612-b979-9f57fa2adabe","Type":"ContainerStarted","Data":"93354cff2b830d2ac525b40a7ece21ed6595809db7a0cd0c4bb7345f04b3f4e5"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.660256 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" event={"ID":"7563ba12-e36d-48b2-8d43-57435fe85d0e","Type":"ContainerStarted","Data":"5f84f01ef8ad5662028783c472af2d86ebfacad854cd71965cf7d34cff1051b5"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.667297 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" event={"ID":"2f985dd7-de9f-498f-a297-f0602a4888a4","Type":"ContainerStarted","Data":"3b76754fcf91244aaca32236e76c5c98f195eb3a8eb53b776407d7355835d01d"} Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.669306 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" podUID="2f985dd7-de9f-498f-a297-f0602a4888a4" Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.671073 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" event={"ID":"60612556-0f2a-4999-afb7-d71b32d18ef0","Type":"ContainerStarted","Data":"56db93e3ff6aab8380ae5a2a68ab99d4d757733ba80d18f297396743a7669614"} Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.674521 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" event={"ID":"93016dff-dd26-4447-bb03-244d51ba4154","Type":"ContainerStarted","Data":"73764ac4991c320a437f77c2b9315daf1fc7a434282e7d391032cbe594de94ae"} Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.677501 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" podUID="60612556-0f2a-4999-afb7-d71b32d18ef0" Dec 03 05:56:18 crc kubenswrapper[4810]: I1203 05:56:18.825271 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.825489 4810 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:18 crc kubenswrapper[4810]: E1203 05:56:18.825572 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert podName:4517c669-2df4-40be-bcc1-0b44fa11838d nodeName:}" failed. No retries permitted until 2025-12-03 05:56:22.825532709 +0000 UTC m=+906.760993540 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert") pod "infra-operator-controller-manager-57548d458d-fdct7" (UID: "4517c669-2df4-40be-bcc1-0b44fa11838d") : secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:19 crc kubenswrapper[4810]: I1203 05:56:19.231550 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.231761 4810 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.231834 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert podName:7306b11b-b539-4542-af3f-a738880af67f nodeName:}" failed. No retries permitted until 2025-12-03 05:56:23.23180806 +0000 UTC m=+907.167268901 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686v7m42" (UID: "7306b11b-b539-4542-af3f-a738880af67f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:19 crc kubenswrapper[4810]: I1203 05:56:19.643085 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:19 crc kubenswrapper[4810]: I1203 05:56:19.643160 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.643337 4810 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.643399 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:23.643380011 +0000 UTC m=+907.578840852 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "webhook-server-cert" not found Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.643442 4810 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.643460 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:23.643453913 +0000 UTC m=+907.578914754 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "metrics-server-cert" not found Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.686322 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" podUID="2f985dd7-de9f-498f-a297-f0602a4888a4" Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.689094 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" podUID="f8e032e8-8552-4d00-861c-798b4e59b83e" Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.689316 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" podUID="60612556-0f2a-4999-afb7-d71b32d18ef0" Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.689374 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" podUID="799ba5c1-1eae-4a8a-9177-454e5bcba2a5" Dec 03 
05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.689449 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" podUID="101772d6-6540-4695-a13f-ab0ce9a4bff2" Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.689519 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" podUID="660ec0b8-77cd-4cb2-9597-abca0770fbf9" Dec 03 05:56:19 crc kubenswrapper[4810]: E1203 05:56:19.689813 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" podUID="13befddb-d7f2-48bb-9d8c-8e61fbd8601a" Dec 03 05:56:22 crc kubenswrapper[4810]: I1203 05:56:22.826285 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:22 crc kubenswrapper[4810]: E1203 05:56:22.826495 4810 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:22 crc kubenswrapper[4810]: E1203 05:56:22.826837 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert podName:4517c669-2df4-40be-bcc1-0b44fa11838d nodeName:}" failed. No retries permitted until 2025-12-03 05:56:30.826812547 +0000 UTC m=+914.762273388 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert") pod "infra-operator-controller-manager-57548d458d-fdct7" (UID: "4517c669-2df4-40be-bcc1-0b44fa11838d") : secret "infra-operator-webhook-server-cert" not found Dec 03 05:56:23 crc kubenswrapper[4810]: I1203 05:56:23.233138 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:23 crc kubenswrapper[4810]: E1203 05:56:23.233412 4810 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:23 crc kubenswrapper[4810]: E1203 05:56:23.233545 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert podName:7306b11b-b539-4542-af3f-a738880af67f nodeName:}" failed. No retries permitted until 2025-12-03 05:56:31.233512519 +0000 UTC m=+915.168973560 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686v7m42" (UID: "7306b11b-b539-4542-af3f-a738880af67f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 05:56:23 crc kubenswrapper[4810]: I1203 05:56:23.643588 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:23 crc kubenswrapper[4810]: E1203 05:56:23.643867 4810 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 05:56:23 crc kubenswrapper[4810]: I1203 05:56:23.643971 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:23 crc kubenswrapper[4810]: E1203 05:56:23.644084 4810 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 05:56:23 crc kubenswrapper[4810]: E1203 05:56:23.644238 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:31.644050613 +0000 UTC m=+915.579511644 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "metrics-server-cert" not found Dec 03 05:56:23 crc kubenswrapper[4810]: E1203 05:56:23.644260 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:31.644249438 +0000 UTC m=+915.579710519 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "webhook-server-cert" not found Dec 03 05:56:25 crc kubenswrapper[4810]: I1203 05:56:25.678087 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:56:25 crc kubenswrapper[4810]: I1203 05:56:25.678177 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:56:30 crc kubenswrapper[4810]: I1203 05:56:30.908558 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:30 crc kubenswrapper[4810]: I1203 05:56:30.924540 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4517c669-2df4-40be-bcc1-0b44fa11838d-cert\") pod \"infra-operator-controller-manager-57548d458d-fdct7\" (UID: \"4517c669-2df4-40be-bcc1-0b44fa11838d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:31 crc kubenswrapper[4810]: I1203 05:56:31.070267 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-w2cxx" Dec 03 05:56:31 crc kubenswrapper[4810]: I1203 05:56:31.079637 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:31 crc kubenswrapper[4810]: I1203 05:56:31.243407 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:31 crc kubenswrapper[4810]: I1203 05:56:31.249128 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7306b11b-b539-4542-af3f-a738880af67f-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686v7m42\" (UID: \"7306b11b-b539-4542-af3f-a738880af67f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:31 crc kubenswrapper[4810]: I1203 05:56:31.392467 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-hdr8r" Dec 03 05:56:31 crc kubenswrapper[4810]: I1203 05:56:31.401508 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:31 crc kubenswrapper[4810]: I1203 05:56:31.653369 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:31 crc kubenswrapper[4810]: I1203 05:56:31.653426 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:31 crc kubenswrapper[4810]: E1203 05:56:31.653640 4810 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 05:56:31 crc kubenswrapper[4810]: E1203 05:56:31.653638 4810 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 05:56:31 crc kubenswrapper[4810]: E1203 05:56:31.653717 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:47.653696043 +0000 UTC m=+931.589156904 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "webhook-server-cert" not found Dec 03 05:56:31 crc kubenswrapper[4810]: E1203 05:56:31.653784 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs podName:a67e7123-c5dc-4392-9296-02892458e969 nodeName:}" failed. No retries permitted until 2025-12-03 05:56:47.653752965 +0000 UTC m=+931.589213826 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-kq5vk" (UID: "a67e7123-c5dc-4392-9296-02892458e969") : secret "metrics-server-cert" not found Dec 03 05:56:32 crc kubenswrapper[4810]: E1203 05:56:32.235520 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 03 05:56:32 crc kubenswrapper[4810]: E1203 05:56:32.235828 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jwhlt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-b9hqh_openstack-operators(b36e1f29-d534-4c72-bcac-74ffc356c086): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:32 crc kubenswrapper[4810]: E1203 05:56:32.767618 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 03 05:56:32 crc kubenswrapper[4810]: E1203 05:56:32.767848 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jlnhm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-lvc9h_openstack-operators(70b45fee-d617-41b2-a598-eae2815e19c6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:33 crc kubenswrapper[4810]: E1203 05:56:33.631451 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 03 05:56:33 crc kubenswrapper[4810]: E1203 05:56:33.631922 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5vc52,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-pds6l_openstack-operators(3de263f5-25e9-41a0-a51d-37317cb65b16): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:34 crc kubenswrapper[4810]: E1203 05:56:34.678071 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 03 05:56:34 crc kubenswrapper[4810]: E1203 05:56:34.678498 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mdt8h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-vs4m6_openstack-operators(ac2ef7b5-018c-4775-8e14-106265e1c300): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:42 crc kubenswrapper[4810]: E1203 05:56:42.455153 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 03 05:56:42 crc kubenswrapper[4810]: E1203 05:56:42.455938 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pgdsm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-5lkc2_openstack-operators(fa52e238-d025-4845-85bb-2787a7eb2ed7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:42 crc kubenswrapper[4810]: I1203 05:56:42.969888 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-fdct7"] Dec 03 05:56:45 crc kubenswrapper[4810]: E1203 05:56:45.008805 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9" Dec 03 05:56:45 crc kubenswrapper[4810]: E1203 05:56:45.009540 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2h7x2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-pzmcl_openstack-operators(60612556-0f2a-4999-afb7-d71b32d18ef0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:45 crc kubenswrapper[4810]: E1203 05:56:45.799504 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Dec 03 05:56:45 crc kubenswrapper[4810]: E1203 05:56:45.800472 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s57bh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-2ktb9_openstack-operators(13befddb-d7f2-48bb-9d8c-8e61fbd8601a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:46 crc kubenswrapper[4810]: I1203 05:56:46.259341 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42"] Dec 03 05:56:46 crc kubenswrapper[4810]: I1203 05:56:46.946266 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" event={"ID":"4517c669-2df4-40be-bcc1-0b44fa11838d","Type":"ContainerStarted","Data":"f007da7eda2e4f04714b6311516bcf96d275ba0bce652f278e43392fa9768e99"} Dec 03 05:56:47 crc kubenswrapper[4810]: E1203 05:56:47.282655 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 03 05:56:47 crc kubenswrapper[4810]: E1203 05:56:47.282987 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-47tcs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-7nw2d_openstack-operators(2f985dd7-de9f-498f-a297-f0602a4888a4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:47 crc kubenswrapper[4810]: E1203 05:56:47.284188 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" podUID="2f985dd7-de9f-498f-a297-f0602a4888a4" Dec 03 05:56:47 crc kubenswrapper[4810]: I1203 05:56:47.694945 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:47 crc kubenswrapper[4810]: I1203 05:56:47.694990 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:47 crc kubenswrapper[4810]: I1203 05:56:47.703948 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:47 crc kubenswrapper[4810]: I1203 05:56:47.720940 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a67e7123-c5dc-4392-9296-02892458e969-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-kq5vk\" (UID: \"a67e7123-c5dc-4392-9296-02892458e969\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:48 crc kubenswrapper[4810]: I1203 05:56:48.009546 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-bccf2" Dec 03 05:56:48 crc kubenswrapper[4810]: I1203 05:56:48.018523 4810 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:48 crc kubenswrapper[4810]: E1203 05:56:48.653643 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 05:56:48 crc kubenswrapper[4810]: E1203 05:56:48.653850 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-whr5x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-2hkc4_openstack-operators(4a806c3b-f888-4612-b979-9f57fa2adabe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:56:48 crc kubenswrapper[4810]: E1203 05:56:48.655056 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" podUID="4a806c3b-f888-4612-b979-9f57fa2adabe" Dec 03 05:56:48 crc kubenswrapper[4810]: I1203 05:56:48.961831 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" event={"ID":"af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d","Type":"ContainerStarted","Data":"5b5d8c22a52b4f788c2086d60d3508383eb4d364f18b13f2b5f0637b7e510acf"} Dec 03 05:56:48 crc kubenswrapper[4810]: I1203 05:56:48.964239 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" event={"ID":"7306b11b-b539-4542-af3f-a738880af67f","Type":"ContainerStarted","Data":"f646660fca27c65efd91fae635e65a27a5febc08113c03de6b2c20ded02afd6b"} Dec 03 05:56:49 crc kubenswrapper[4810]: I1203 05:56:49.895683 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk"] Dec 03 
05:56:49 crc kubenswrapper[4810]: I1203 05:56:49.978538 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" event={"ID":"a67e7123-c5dc-4392-9296-02892458e969","Type":"ContainerStarted","Data":"206da696920501c69fb40c13a7546a4f35bdd1d4a70e7894f7d42554a5526589"} Dec 03 05:56:50 crc kubenswrapper[4810]: I1203 05:56:50.011355 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" event={"ID":"5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462","Type":"ContainerStarted","Data":"0692f785e6c7a99097658cd898a059b0988c35382b048add56dd8e0cdb30c434"} Dec 03 05:56:50 crc kubenswrapper[4810]: I1203 05:56:50.013313 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" event={"ID":"93016dff-dd26-4447-bb03-244d51ba4154","Type":"ContainerStarted","Data":"264fef3a0ba619dad1971e257deef59c572d4757556991ec5661e637a4afc157"} Dec 03 05:56:50 crc kubenswrapper[4810]: I1203 05:56:50.014904 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" event={"ID":"68f5962b-45be-45a4-9822-eb23088d3d79","Type":"ContainerStarted","Data":"85183f9123e2b98a4624bc94d1db9a69e4c79773a7f151c1efb63d02f335c028"} Dec 03 05:56:50 crc kubenswrapper[4810]: I1203 05:56:50.016104 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" event={"ID":"7563ba12-e36d-48b2-8d43-57435fe85d0e","Type":"ContainerStarted","Data":"fe5afdb8cdc5b8ffc03a6f85259c544026e83c0217e7cccc2001947bbec5894c"} Dec 03 05:56:50 crc kubenswrapper[4810]: E1203 05:56:50.586375 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" podUID="b36e1f29-d534-4c72-bcac-74ffc356c086" Dec 03 05:56:50 crc kubenswrapper[4810]: E1203 05:56:50.679715 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" podUID="70b45fee-d617-41b2-a598-eae2815e19c6" Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.031021 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" event={"ID":"4b9517ac-6af4-40eb-a049-7b778dcc5f10","Type":"ContainerStarted","Data":"3d6c53107739da80d8a28fb21b3ba686e4d38716da95f1bd6da900df56b944a1"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.033304 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" event={"ID":"70b45fee-d617-41b2-a598-eae2815e19c6","Type":"ContainerStarted","Data":"f4bf0107f7ab701775a7a0467ef6929e664b7f2244638cc496dfb27b566646cd"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.037231 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" event={"ID":"7cf67e34-abd6-4424-95f4-7654ac840108","Type":"ContainerStarted","Data":"e34d75baa65b8e534efb54bb5bd9520294a478889913f4fb735b59924961b6a4"} Dec 03 
05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.051141 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" event={"ID":"101772d6-6540-4695-a13f-ab0ce9a4bff2","Type":"ContainerStarted","Data":"d5a4d3d295ce4f4fa63dcaee6154e6228c09faecea23426feab396ec6bce741f"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.056540 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" event={"ID":"660ec0b8-77cd-4cb2-9597-abca0770fbf9","Type":"ContainerStarted","Data":"59eaefb70801c74d62aaf0d33a12b839184f1243514bfe379e64da41bc4953a5"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.058464 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" event={"ID":"f8e032e8-8552-4d00-861c-798b4e59b83e","Type":"ContainerStarted","Data":"a51c9b4bb26205365fba1248db6305e1dbc9c0eeb83b4172bd7ccab46805b4ea"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.067142 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" event={"ID":"68f5962b-45be-45a4-9822-eb23088d3d79","Type":"ContainerStarted","Data":"f4aae2b0116ffcc9ec2edb0e0cef1b55b24db1c02244c8b13626d328362323dd"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.068099 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.071825 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" event={"ID":"b36e1f29-d534-4c72-bcac-74ffc356c086","Type":"ContainerStarted","Data":"e37efe177b991955ba21d37114c096e81b26e042696f8dee4d4bb2dbb4b268ca"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.100669 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" podStartSLOduration=19.445936858 podStartE2EDuration="37.100650446s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.653891141 +0000 UTC m=+901.589351982" lastFinishedPulling="2025-12-03 05:56:35.308604729 +0000 UTC m=+919.244065570" observedRunningTime="2025-12-03 05:56:51.09890142 +0000 UTC m=+935.034362281" watchObservedRunningTime="2025-12-03 05:56:51.100650446 +0000 UTC m=+935.036111287" Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.106422 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" event={"ID":"4517c669-2df4-40be-bcc1-0b44fa11838d","Type":"ContainerStarted","Data":"af5dcc5db3d096681ed09dda49e8eb7fad26c90c79ffbad45a52d33320589c3c"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.119250 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" event={"ID":"af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d","Type":"ContainerStarted","Data":"24a8508aadd59d7c56bfd14e01ead506cb613dbd6ce7e7d579d128f9ea51e233"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.120197 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 
05:56:51.141967 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" event={"ID":"799ba5c1-1eae-4a8a-9177-454e5bcba2a5","Type":"ContainerStarted","Data":"d395ee803ef16bc2fabea8e4624bf526f81ed9e4fcd77414ca223710cb0ab179"} Dec 03 05:56:51 crc kubenswrapper[4810]: I1203 05:56:51.172617 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" podStartSLOduration=18.981424377 podStartE2EDuration="37.17259184s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.117424965 +0000 UTC m=+901.052885806" lastFinishedPulling="2025-12-03 05:56:35.308592428 +0000 UTC m=+919.244053269" observedRunningTime="2025-12-03 05:56:51.169070048 +0000 UTC m=+935.104530889" watchObservedRunningTime="2025-12-03 05:56:51.17259184 +0000 UTC m=+935.108052681" Dec 03 05:56:52 crc kubenswrapper[4810]: E1203 05:56:52.756583 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" podUID="ac2ef7b5-018c-4775-8e14-106265e1c300" Dec 03 05:56:52 crc kubenswrapper[4810]: E1203 05:56:52.793157 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" podUID="3de263f5-25e9-41a0-a51d-37317cb65b16" Dec 03 05:56:52 crc kubenswrapper[4810]: E1203 05:56:52.799969 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" podUID="fa52e238-d025-4845-85bb-2787a7eb2ed7" Dec 03 05:56:52 crc kubenswrapper[4810]: E1203 05:56:52.801073 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" podUID="13befddb-d7f2-48bb-9d8c-8e61fbd8601a" Dec 03 05:56:52 crc kubenswrapper[4810]: E1203 05:56:52.801586 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" podUID="60612556-0f2a-4999-afb7-d71b32d18ef0" Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.157803 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" event={"ID":"13befddb-d7f2-48bb-9d8c-8e61fbd8601a","Type":"ContainerStarted","Data":"7123703e6b8d181d0979527614aa149b591fdb1c2f890bb0bcf5ba391c7b5f78"} Dec 03 05:56:53 crc kubenswrapper[4810]: E1203 05:56:53.159959 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" podUID="13befddb-d7f2-48bb-9d8c-8e61fbd8601a" Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.160858 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" event={"ID":"3de263f5-25e9-41a0-a51d-37317cb65b16","Type":"ContainerStarted","Data":"8bbc539f7349e1ed82adb674f33a27f61f3ddd08967bb7c5c6e4e40d3509f8f5"} Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.163661 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" event={"ID":"93016dff-dd26-4447-bb03-244d51ba4154","Type":"ContainerStarted","Data":"f7b8e428349bf7b617389ff48e6260f968b32c05cc5c8d8e412591d6414e469f"} Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.163880 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.167093 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" event={"ID":"ac2ef7b5-018c-4775-8e14-106265e1c300","Type":"ContainerStarted","Data":"535432f6294f6479587263a64e1443bf5b9413827f549d7da23b38f11f3c9a7c"} Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.169146 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" event={"ID":"fa52e238-d025-4845-85bb-2787a7eb2ed7","Type":"ContainerStarted","Data":"948c9675502891196d83074e0628800ecf029c0cfddb86b03e5c8d9c8f984e0b"} Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.171057 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" event={"ID":"60612556-0f2a-4999-afb7-d71b32d18ef0","Type":"ContainerStarted","Data":"2898e332f2679e18678674e84457095d810da882300b42e6c54c39a82c158458"} Dec 03 05:56:53 crc kubenswrapper[4810]: E1203 05:56:53.174164 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9\\\"\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" podUID="60612556-0f2a-4999-afb7-d71b32d18ef0" Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.176132 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9f8m" Dec 03 05:56:53 crc kubenswrapper[4810]: I1203 05:56:53.317579 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" podStartSLOduration=22.151560858 podStartE2EDuration="39.317555822s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.607295931 +0000 UTC m=+901.542756762" lastFinishedPulling="2025-12-03 05:56:34.773290885 +0000 UTC m=+918.708751726" observedRunningTime="2025-12-03 05:56:53.316543035 +0000 UTC m=+937.252003876" watchObservedRunningTime="2025-12-03 05:56:53.317555822 +0000 UTC 
m=+937.253016663" Dec 03 05:56:54 crc kubenswrapper[4810]: I1203 05:56:54.195317 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" event={"ID":"7563ba12-e36d-48b2-8d43-57435fe85d0e","Type":"ContainerStarted","Data":"769b1692ebf59897e10f74b34afdc252786cdddeef317ddcac13795804840d4d"} Dec 03 05:56:54 crc kubenswrapper[4810]: I1203 05:56:54.196499 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" Dec 03 05:56:54 crc kubenswrapper[4810]: I1203 05:56:54.208546 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" Dec 03 05:56:54 crc kubenswrapper[4810]: I1203 05:56:54.208637 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" Dec 03 05:56:54 crc kubenswrapper[4810]: I1203 05:56:54.231177 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zclnr" podStartSLOduration=22.660558556 podStartE2EDuration="40.231160535s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.737276201 +0000 UTC m=+901.672737032" lastFinishedPulling="2025-12-03 05:56:35.30787817 +0000 UTC m=+919.243339011" observedRunningTime="2025-12-03 05:56:54.227575631 +0000 UTC m=+938.163036472" watchObservedRunningTime="2025-12-03 05:56:54.231160535 +0000 UTC m=+938.166621376" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.203010 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" event={"ID":"660ec0b8-77cd-4cb2-9597-abca0770fbf9","Type":"ContainerStarted","Data":"687b0ceee22c6597e58ed7990595fce95cceb462000875b234a288e25a6b90b1"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.204428 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.209757 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.211926 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" event={"ID":"4b9517ac-6af4-40eb-a049-7b778dcc5f10","Type":"ContainerStarted","Data":"2bfb3ff90cb8de551d8049ec42ce005982bbe862be982458877b6ff61350c18a"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.214579 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.215787 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.217707 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" 
event={"ID":"799ba5c1-1eae-4a8a-9177-454e5bcba2a5","Type":"ContainerStarted","Data":"8e2d594250e9463af629a2f30ba7894c0bbf3561484a12dea5c1f2fbefce0473"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.217970 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.221626 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.222832 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" event={"ID":"fa52e238-d025-4845-85bb-2787a7eb2ed7","Type":"ContainerStarted","Data":"9248db509bc8c94d00a3a72cddc42a7cf11c0fc6e5890758d66ce8d342957588"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.222993 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.224976 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" event={"ID":"70b45fee-d617-41b2-a598-eae2815e19c6","Type":"ContainerStarted","Data":"fe7d50c5f3350754c981cb6a48f76612979664b9424717bfceed8e87c89485b6"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.225796 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.227635 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" event={"ID":"b36e1f29-d534-4c72-bcac-74ffc356c086","Type":"ContainerStarted","Data":"5736bfca4b9fb95c76d232d4b9db3e149b1f75e5fb64ee0239871b9fae0ee9d8"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.228101 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.230837 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" event={"ID":"f8e032e8-8552-4d00-861c-798b4e59b83e","Type":"ContainerStarted","Data":"fca4b077ab42b45cfa878b203a716662b0873a00c46f42b6fd4210ec2ec19fc4"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.231638 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.234676 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" event={"ID":"7306b11b-b539-4542-af3f-a738880af67f","Type":"ContainerStarted","Data":"85b602504bc4b6e85f72dc69a53a3c84749c5d261c113cd0c76583e18722fa02"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.236558 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" event={"ID":"4517c669-2df4-40be-bcc1-0b44fa11838d","Type":"ContainerStarted","Data":"62d309e1382013201023cda13726834349f49772218f6ad7d41388cb670165d2"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 
05:56:55.237910 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.241931 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.242035 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" event={"ID":"101772d6-6540-4695-a13f-ab0ce9a4bff2","Type":"ContainerStarted","Data":"21c937bf412c9d0aca8a28c0e4b68b5e59577ef79bd92fcd0f3d8b6149e00c76"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.242190 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.244820 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" event={"ID":"3de263f5-25e9-41a0-a51d-37317cb65b16","Type":"ContainerStarted","Data":"ee6bcd111022a6a10e3ffcd8bd39802bafdc159dec81073151e45518d6f1c023"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.244912 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.247948 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.250770 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" event={"ID":"ac2ef7b5-018c-4775-8e14-106265e1c300","Type":"ContainerStarted","Data":"945408aa456cec1589222ff0a5a747411681e198dd5b82c75dad082d237e61af"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.250899 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.253171 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" event={"ID":"7cf67e34-abd6-4424-95f4-7654ac840108","Type":"ContainerStarted","Data":"ffccaf344f287950476bdeb986ced44790fe6babeb11af4aaf951d5761637af4"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.253241 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.257320 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-z6tqk" podStartSLOduration=9.312992183 podStartE2EDuration="40.257297571s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.737701902 +0000 UTC m=+901.673162743" lastFinishedPulling="2025-12-03 05:56:48.68200729 +0000 UTC m=+932.617468131" observedRunningTime="2025-12-03 05:56:55.239201205 +0000 UTC m=+939.174662046" watchObservedRunningTime="2025-12-03 05:56:55.257297571 +0000 UTC m=+939.192758412" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.261161 4810 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.261254 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.271190 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" event={"ID":"a67e7123-c5dc-4392-9296-02892458e969","Type":"ContainerStarted","Data":"0ba04188cd379a7ee75ea8b81c40e42bf08686b886f1a95ff6ce23b7c46a227f"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.271601 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.276071 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-md8gl" podStartSLOduration=9.451117975 podStartE2EDuration="40.276052955s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.816969524 +0000 UTC m=+901.752430355" lastFinishedPulling="2025-12-03 05:56:48.641904494 +0000 UTC m=+932.577365335" observedRunningTime="2025-12-03 05:56:55.271030633 +0000 UTC m=+939.206491474" watchObservedRunningTime="2025-12-03 05:56:55.276052955 +0000 UTC m=+939.211513796" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.276255 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" event={"ID":"5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462","Type":"ContainerStarted","Data":"9d8956fc0968e8b227bf9f78f8341c39bd53173bcc9230f58397fa8e0a416c80"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.279614 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.290853 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.299413 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-r42sr" podStartSLOduration=8.77787966 podStartE2EDuration="40.29939313s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.805521022 +0000 UTC m=+901.740981863" lastFinishedPulling="2025-12-03 05:56:49.327034492 +0000 UTC m=+933.262495333" observedRunningTime="2025-12-03 05:56:55.296720429 +0000 UTC m=+939.232181270" watchObservedRunningTime="2025-12-03 05:56:55.29939313 +0000 UTC m=+939.234853971" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.303796 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" event={"ID":"4a806c3b-f888-4612-b979-9f57fa2adabe","Type":"ContainerStarted","Data":"91c4020df375979b70dae8e74ad9acf64d1cc778c8cfe540b4051052f0128b79"} Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.379928 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" podStartSLOduration=3.696157612 podStartE2EDuration="40.379894539s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.663423712 +0000 UTC m=+901.598884563" lastFinishedPulling="2025-12-03 05:56:54.347160649 +0000 UTC m=+938.282621490" observedRunningTime="2025-12-03 05:56:55.354794778 +0000 UTC m=+939.290255619" watchObservedRunningTime="2025-12-03 05:56:55.379894539 +0000 UTC m=+939.315355380" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.403011 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" podStartSLOduration=4.766202657 podStartE2EDuration="41.402984847s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.677464183 +0000 UTC m=+901.612925024" lastFinishedPulling="2025-12-03 05:56:54.314246373 +0000 UTC m=+938.249707214" observedRunningTime="2025-12-03 05:56:55.40233052 +0000 UTC m=+939.337791351" watchObservedRunningTime="2025-12-03 05:56:55.402984847 +0000 UTC m=+939.338445688" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.469632 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" podStartSLOduration=3.7831388009999998 podStartE2EDuration="40.469605281s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.643725262 +0000 UTC m=+901.579186103" lastFinishedPulling="2025-12-03 05:56:54.330191742 +0000 UTC m=+938.265652583" observedRunningTime="2025-12-03 05:56:55.450344664 +0000 UTC m=+939.385805505" watchObservedRunningTime="2025-12-03 05:56:55.469605281 +0000 UTC m=+939.405066132" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.531223 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-km59q" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.557131 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wbdfg" podStartSLOduration=23.909077604 podStartE2EDuration="41.557101105s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.663570236 +0000 UTC m=+901.599031077" lastFinishedPulling="2025-12-03 05:56:35.311593717 +0000 UTC m=+919.247054578" observedRunningTime="2025-12-03 05:56:55.538720291 +0000 UTC m=+939.474181152" watchObservedRunningTime="2025-12-03 05:56:55.557101105 +0000 UTC m=+939.492561946" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.603144 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fdct7" podStartSLOduration=38.603130493 podStartE2EDuration="41.603115516s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:46.600280172 +0000 UTC m=+930.535741013" lastFinishedPulling="2025-12-03 05:56:49.600265195 +0000 UTC m=+933.535726036" observedRunningTime="2025-12-03 05:56:55.581317862 +0000 UTC m=+939.516778693" watchObservedRunningTime="2025-12-03 05:56:55.603115516 +0000 UTC m=+939.538576347" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.609287 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-hw6bt" podStartSLOduration=23.44126062 podStartE2EDuration="40.609272278s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.60689649 +0000 UTC m=+901.542357331" lastFinishedPulling="2025-12-03 05:56:34.774908138 +0000 UTC m=+918.710368989" observedRunningTime="2025-12-03 05:56:55.608241501 +0000 UTC m=+939.543702342" watchObservedRunningTime="2025-12-03 05:56:55.609272278 +0000 UTC m=+939.544733109" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.676991 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.677052 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.748063 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" podStartSLOduration=40.748040212 podStartE2EDuration="40.748040212s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:56:55.745094294 +0000 UTC m=+939.680555125" watchObservedRunningTime="2025-12-03 05:56:55.748040212 +0000 UTC m=+939.683501053" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.804943 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-mpgcv" podStartSLOduration=23.615285066 podStartE2EDuration="41.804920699s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.121660527 +0000 UTC m=+901.057121368" lastFinishedPulling="2025-12-03 05:56:35.31129616 +0000 UTC m=+919.246757001" observedRunningTime="2025-12-03 05:56:55.803882812 +0000 UTC m=+939.739343663" watchObservedRunningTime="2025-12-03 05:56:55.804920699 +0000 UTC m=+939.740381540" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.838331 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" podStartSLOduration=4.181476763 podStartE2EDuration="40.838312888s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.736830389 +0000 UTC m=+901.672291230" lastFinishedPulling="2025-12-03 05:56:54.393666514 +0000 UTC m=+938.329127355" observedRunningTime="2025-12-03 05:56:55.837916368 +0000 UTC m=+939.773377209" watchObservedRunningTime="2025-12-03 05:56:55.838312888 +0000 UTC m=+939.773773729" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.870250 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-64665" podStartSLOduration=10.330822918 podStartE2EDuration="41.870225119s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 
05:56:17.796296859 +0000 UTC m=+901.731757700" lastFinishedPulling="2025-12-03 05:56:49.33569906 +0000 UTC m=+933.271159901" observedRunningTime="2025-12-03 05:56:55.868222326 +0000 UTC m=+939.803683167" watchObservedRunningTime="2025-12-03 05:56:55.870225119 +0000 UTC m=+939.805685960" Dec 03 05:56:55 crc kubenswrapper[4810]: I1203 05:56:55.894100 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" podStartSLOduration=5.282564585 podStartE2EDuration="41.894079787s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.735087713 +0000 UTC m=+901.670548554" lastFinishedPulling="2025-12-03 05:56:54.346602915 +0000 UTC m=+938.282063756" observedRunningTime="2025-12-03 05:56:55.887924905 +0000 UTC m=+939.823385746" watchObservedRunningTime="2025-12-03 05:56:55.894079787 +0000 UTC m=+939.829540628" Dec 03 05:56:56 crc kubenswrapper[4810]: I1203 05:56:56.350071 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" event={"ID":"4a806c3b-f888-4612-b979-9f57fa2adabe","Type":"ContainerStarted","Data":"1e5abac1cb5fcb55106054129dcd6362c7e22a05ae2a371b9a76b4277b0fcae3"} Dec 03 05:56:56 crc kubenswrapper[4810]: I1203 05:56:56.350165 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" Dec 03 05:56:56 crc kubenswrapper[4810]: I1203 05:56:56.356970 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" event={"ID":"7306b11b-b539-4542-af3f-a738880af67f","Type":"ContainerStarted","Data":"4dd76e6f24a8323ac41e533e4c8518b5a21931e7a577d97f973aecd57f639560"} Dec 03 05:56:56 crc kubenswrapper[4810]: I1203 05:56:56.357006 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:56:56 crc kubenswrapper[4810]: I1203 05:56:56.369105 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" podStartSLOduration=4.929529294 podStartE2EDuration="41.369087093s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.805285996 +0000 UTC m=+901.740746837" lastFinishedPulling="2025-12-03 05:56:54.244843795 +0000 UTC m=+938.180304636" observedRunningTime="2025-12-03 05:56:56.366383051 +0000 UTC m=+940.301843902" watchObservedRunningTime="2025-12-03 05:56:56.369087093 +0000 UTC m=+940.304547934" Dec 03 05:56:56 crc kubenswrapper[4810]: I1203 05:56:56.419047 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" podStartSLOduration=35.378182246 podStartE2EDuration="41.419022257s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:48.032413167 +0000 UTC m=+931.967874008" lastFinishedPulling="2025-12-03 05:56:54.073253178 +0000 UTC m=+938.008714019" observedRunningTime="2025-12-03 05:56:56.411669764 +0000 UTC m=+940.347130625" watchObservedRunningTime="2025-12-03 05:56:56.419022257 +0000 UTC m=+940.354483098" Dec 03 05:57:01 crc kubenswrapper[4810]: I1203 05:57:01.413601 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686v7m42" Dec 03 05:57:02 crc kubenswrapper[4810]: E1203 05:57:02.382106 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" podUID="2f985dd7-de9f-498f-a297-f0602a4888a4" Dec 03 05:57:05 crc kubenswrapper[4810]: I1203 05:57:05.265866 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9hqh" Dec 03 05:57:05 crc kubenswrapper[4810]: I1203 05:57:05.532917 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-pds6l" Dec 03 05:57:05 crc kubenswrapper[4810]: I1203 05:57:05.573042 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" Dec 03 05:57:05 crc kubenswrapper[4810]: I1203 05:57:05.760104 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" Dec 03 05:57:05 crc kubenswrapper[4810]: I1203 05:57:05.858589 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-lvc9h" Dec 03 05:57:05 crc kubenswrapper[4810]: I1203 05:57:05.924721 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2hkc4" Dec 03 05:57:06 crc kubenswrapper[4810]: I1203 05:57:06.453198 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" event={"ID":"60612556-0f2a-4999-afb7-d71b32d18ef0","Type":"ContainerStarted","Data":"6d7d6d0f6e3a8b87f4b384a50f11b0372b567a37598c8fd43948a00092f2feed"} Dec 03 05:57:06 crc kubenswrapper[4810]: I1203 05:57:06.453560 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" Dec 03 05:57:07 crc kubenswrapper[4810]: I1203 05:57:07.400271 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" podStartSLOduration=5.311268951 podStartE2EDuration="53.400244787s" podCreationTimestamp="2025-12-03 05:56:14 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.737502907 +0000 UTC m=+901.672963738" lastFinishedPulling="2025-12-03 05:57:05.826478723 +0000 UTC m=+949.761939574" observedRunningTime="2025-12-03 05:57:06.473253282 +0000 UTC m=+950.408714123" watchObservedRunningTime="2025-12-03 05:57:07.400244787 +0000 UTC m=+951.335705618" Dec 03 05:57:08 crc kubenswrapper[4810]: I1203 05:57:08.029822 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-kq5vk" Dec 03 05:57:10 crc kubenswrapper[4810]: I1203 05:57:10.505862 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" 
event={"ID":"13befddb-d7f2-48bb-9d8c-8e61fbd8601a","Type":"ContainerStarted","Data":"7371fc1bf0c7e5bedda0af58c67c8b727efe7004bf8918e7aa3289bbe0f2ab6b"} Dec 03 05:57:10 crc kubenswrapper[4810]: I1203 05:57:10.506961 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" Dec 03 05:57:10 crc kubenswrapper[4810]: I1203 05:57:10.523267 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" podStartSLOduration=3.098839245 podStartE2EDuration="55.523244608s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.817069737 +0000 UTC m=+901.752530578" lastFinishedPulling="2025-12-03 05:57:10.24147509 +0000 UTC m=+954.176935941" observedRunningTime="2025-12-03 05:57:10.52063729 +0000 UTC m=+954.456098171" watchObservedRunningTime="2025-12-03 05:57:10.523244608 +0000 UTC m=+954.458705459" Dec 03 05:57:15 crc kubenswrapper[4810]: I1203 05:57:15.489305 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-pzmcl" Dec 03 05:57:15 crc kubenswrapper[4810]: I1203 05:57:15.974798 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-2ktb9" Dec 03 05:57:16 crc kubenswrapper[4810]: I1203 05:57:16.396141 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 05:57:21 crc kubenswrapper[4810]: I1203 05:57:21.634493 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" event={"ID":"2f985dd7-de9f-498f-a297-f0602a4888a4","Type":"ContainerStarted","Data":"1b5296bb29a35b4fbcd77a8221f2b034b4210ca17287c2d600983f76e4765124"} Dec 03 05:57:21 crc kubenswrapper[4810]: I1203 05:57:21.661781 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7nw2d" podStartSLOduration=7.308987187 podStartE2EDuration="1m6.661751438s" podCreationTimestamp="2025-12-03 05:56:15 +0000 UTC" firstStartedPulling="2025-12-03 05:56:17.796689369 +0000 UTC m=+901.732150210" lastFinishedPulling="2025-12-03 05:57:17.14945361 +0000 UTC m=+961.084914461" observedRunningTime="2025-12-03 05:57:21.657102846 +0000 UTC m=+965.592563717" watchObservedRunningTime="2025-12-03 05:57:21.661751438 +0000 UTC m=+965.597212309" Dec 03 05:57:25 crc kubenswrapper[4810]: I1203 05:57:25.677447 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:57:25 crc kubenswrapper[4810]: I1203 05:57:25.679017 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:57:25 crc kubenswrapper[4810]: I1203 05:57:25.679164 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 05:57:25 crc kubenswrapper[4810]: I1203 05:57:25.679916 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"54276ea7f067e034e52d0f67559acc42b63d689d697478d16f1565a902279985"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 05:57:25 crc kubenswrapper[4810]: I1203 05:57:25.680094 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://54276ea7f067e034e52d0f67559acc42b63d689d697478d16f1565a902279985" gracePeriod=600 Dec 03 05:57:28 crc kubenswrapper[4810]: I1203 05:57:28.720998 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="54276ea7f067e034e52d0f67559acc42b63d689d697478d16f1565a902279985" exitCode=0 Dec 03 05:57:28 crc kubenswrapper[4810]: I1203 05:57:28.721228 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"54276ea7f067e034e52d0f67559acc42b63d689d697478d16f1565a902279985"} Dec 03 05:57:28 crc kubenswrapper[4810]: I1203 05:57:28.721510 4810 scope.go:117] "RemoveContainer" containerID="40837cb8664b98412109347434fe923106e68d7421229ce0ff512909a9e08061" Dec 03 05:57:29 crc kubenswrapper[4810]: I1203 05:57:29.733242 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"44c04cb46113a276349e0decdfe2671ab188ec4674c9c0e0a836be88642df5e3"} Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.900254 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-94b4f9f45-d8mcn"] Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.902291 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.906304 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.906671 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-42qzt" Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.906819 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.910590 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.919422 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-94b4f9f45-d8mcn"] Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.971145 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6947456757-jld82"] Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.973092 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.979552 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 03 05:57:47 crc kubenswrapper[4810]: I1203 05:57:47.980428 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6947456757-jld82"] Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.016791 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88f058b3-65fc-468b-b177-b267661f7a8f-config\") pod \"dnsmasq-dns-94b4f9f45-d8mcn\" (UID: \"88f058b3-65fc-468b-b177-b267661f7a8f\") " pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.017516 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v825t\" (UniqueName: \"kubernetes.io/projected/88f058b3-65fc-468b-b177-b267661f7a8f-kube-api-access-v825t\") pod \"dnsmasq-dns-94b4f9f45-d8mcn\" (UID: \"88f058b3-65fc-468b-b177-b267661f7a8f\") " pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.119075 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v825t\" (UniqueName: \"kubernetes.io/projected/88f058b3-65fc-468b-b177-b267661f7a8f-kube-api-access-v825t\") pod \"dnsmasq-dns-94b4f9f45-d8mcn\" (UID: \"88f058b3-65fc-468b-b177-b267661f7a8f\") " pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.119133 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56sbs\" (UniqueName: \"kubernetes.io/projected/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-kube-api-access-56sbs\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.119170 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-config\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.119193 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88f058b3-65fc-468b-b177-b267661f7a8f-config\") pod \"dnsmasq-dns-94b4f9f45-d8mcn\" (UID: \"88f058b3-65fc-468b-b177-b267661f7a8f\") " pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.119244 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-dns-svc\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.120342 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88f058b3-65fc-468b-b177-b267661f7a8f-config\") pod \"dnsmasq-dns-94b4f9f45-d8mcn\" (UID: \"88f058b3-65fc-468b-b177-b267661f7a8f\") " pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 
05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.144557 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v825t\" (UniqueName: \"kubernetes.io/projected/88f058b3-65fc-468b-b177-b267661f7a8f-kube-api-access-v825t\") pod \"dnsmasq-dns-94b4f9f45-d8mcn\" (UID: \"88f058b3-65fc-468b-b177-b267661f7a8f\") " pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.223197 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-config\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.223323 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-dns-svc\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.223398 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56sbs\" (UniqueName: \"kubernetes.io/projected/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-kube-api-access-56sbs\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.224225 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.224314 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-config\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.224419 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-dns-svc\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.244763 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56sbs\" (UniqueName: \"kubernetes.io/projected/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-kube-api-access-56sbs\") pod \"dnsmasq-dns-6947456757-jld82\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.300110 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.593064 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-94b4f9f45-d8mcn"] Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.951768 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" event={"ID":"88f058b3-65fc-468b-b177-b267661f7a8f","Type":"ContainerStarted","Data":"5b1a09032782feb8768e5b71ceffc5ca4453bbb462bfe4f3447411287004aa64"} Dec 03 05:57:48 crc kubenswrapper[4810]: I1203 05:57:48.963860 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6947456757-jld82"] Dec 03 05:57:48 crc kubenswrapper[4810]: W1203 05:57:48.973276 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod622a0d3d_aa6c_409a_9b64_1ab8ffa3cca5.slice/crio-c6b3dce030acea22f98bd4a9a8652625be10b200687264239d9947bbfe7b2d0a WatchSource:0}: Error finding container c6b3dce030acea22f98bd4a9a8652625be10b200687264239d9947bbfe7b2d0a: Status 404 returned error can't find the container with id c6b3dce030acea22f98bd4a9a8652625be10b200687264239d9947bbfe7b2d0a Dec 03 05:57:49 crc kubenswrapper[4810]: I1203 05:57:49.959972 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6947456757-jld82" event={"ID":"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5","Type":"ContainerStarted","Data":"c6b3dce030acea22f98bd4a9a8652625be10b200687264239d9947bbfe7b2d0a"} Dec 03 05:57:50 crc kubenswrapper[4810]: I1203 05:57:50.961927 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6947456757-jld82"] Dec 03 05:57:50 crc kubenswrapper[4810]: I1203 05:57:50.982193 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55dc666865-wxjgh"] Dec 03 05:57:50 crc kubenswrapper[4810]: I1203 05:57:50.983432 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.000329 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55dc666865-wxjgh"] Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.092809 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgqkb\" (UniqueName: \"kubernetes.io/projected/ebca4920-1a0d-4bfa-bec0-0426c75f4447-kube-api-access-fgqkb\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.092935 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-config\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.092971 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-dns-svc\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.195684 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgqkb\" (UniqueName: \"kubernetes.io/projected/ebca4920-1a0d-4bfa-bec0-0426c75f4447-kube-api-access-fgqkb\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.195788 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-config\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.195816 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-dns-svc\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.196802 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-dns-svc\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.197794 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-config\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.247217 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgqkb\" (UniqueName: 
\"kubernetes.io/projected/ebca4920-1a0d-4bfa-bec0-0426c75f4447-kube-api-access-fgqkb\") pod \"dnsmasq-dns-55dc666865-wxjgh\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.288538 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-94b4f9f45-d8mcn"] Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.317129 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d9886d5bf-jzz6t"] Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.318805 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.324998 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.338593 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d9886d5bf-jzz6t"] Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.506079 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-config\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.506281 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxc2f\" (UniqueName: \"kubernetes.io/projected/055bc13f-a18d-44ce-ab83-c0702e38f1d1-kube-api-access-wxc2f\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.506318 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-dns-svc\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.612688 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxc2f\" (UniqueName: \"kubernetes.io/projected/055bc13f-a18d-44ce-ab83-c0702e38f1d1-kube-api-access-wxc2f\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.613247 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-dns-svc\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.613279 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-config\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.614318 4810 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-dns-svc\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.614404 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-config\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.638807 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxc2f\" (UniqueName: \"kubernetes.io/projected/055bc13f-a18d-44ce-ab83-c0702e38f1d1-kube-api-access-wxc2f\") pod \"dnsmasq-dns-5d9886d5bf-jzz6t\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.658316 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:57:51 crc kubenswrapper[4810]: I1203 05:57:51.975962 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55dc666865-wxjgh"] Dec 03 05:57:52 crc kubenswrapper[4810]: W1203 05:57:52.003471 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebca4920_1a0d_4bfa_bec0_0426c75f4447.slice/crio-70a56d96afa75be976e1163ee21cc2dfefae49708090beedce52d27b69da7983 WatchSource:0}: Error finding container 70a56d96afa75be976e1163ee21cc2dfefae49708090beedce52d27b69da7983: Status 404 returned error can't find the container with id 70a56d96afa75be976e1163ee21cc2dfefae49708090beedce52d27b69da7983 Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.112360 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.114151 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.118153 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.118378 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.118390 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-k7shm" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.119306 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.120500 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.122716 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.122914 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.134484 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.209836 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d9886d5bf-jzz6t"] Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224166 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224249 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-config-data\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224276 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224321 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224355 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224395 
4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224420 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224440 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/43dbe69c-d6b8-4773-ad88-79c3c975afdf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224477 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/43dbe69c-d6b8-4773-ad88-79c3c975afdf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224541 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.224567 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzfvt\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-kube-api-access-fzfvt\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.325943 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326027 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326102 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326126 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326168 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326187 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/43dbe69c-d6b8-4773-ad88-79c3c975afdf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326205 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/43dbe69c-d6b8-4773-ad88-79c3c975afdf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326271 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326318 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzfvt\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-kube-api-access-fzfvt\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326351 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326406 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-config-data\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326559 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.326887 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 
05:57:52.327682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.328228 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-config-data\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.329023 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.331268 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.333066 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.333830 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.340952 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/43dbe69c-d6b8-4773-ad88-79c3c975afdf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.341363 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/43dbe69c-d6b8-4773-ad88-79c3c975afdf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.343291 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzfvt\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-kube-api-access-fzfvt\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.352205 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc 
kubenswrapper[4810]: I1203 05:57:52.445957 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.452120 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.454832 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.455100 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.456396 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-n5rld" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.456397 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.456899 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.457049 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.457427 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.457925 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.459427 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.632718 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.632793 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.632846 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.632866 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs2kj\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-kube-api-access-rs2kj\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.632885 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/55cb8ef9-3722-41ab-8655-ccb1508619fd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.633230 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.633253 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.633276 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.633394 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.633415 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/55cb8ef9-3722-41ab-8655-ccb1508619fd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.633447 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.734829 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.734923 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.734947 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.734970 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.734992 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs2kj\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-kube-api-access-rs2kj\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.735010 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/55cb8ef9-3722-41ab-8655-ccb1508619fd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.735035 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.735060 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.735089 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.735177 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.735198 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/55cb8ef9-3722-41ab-8655-ccb1508619fd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.735536 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.736058 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.736192 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.736751 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.737018 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.738263 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.751871 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/55cb8ef9-3722-41ab-8655-ccb1508619fd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.752688 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs2kj\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-kube-api-access-rs2kj\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.753501 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/55cb8ef9-3722-41ab-8655-ccb1508619fd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.753656 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc 
kubenswrapper[4810]: I1203 05:57:52.753778 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.769251 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.840459 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:57:52 crc kubenswrapper[4810]: I1203 05:57:52.948116 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 05:57:53 crc kubenswrapper[4810]: I1203 05:57:53.031805 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"43dbe69c-d6b8-4773-ad88-79c3c975afdf","Type":"ContainerStarted","Data":"1b161a3023869e8bc7b1479a9a9aead2ca52c51671bf58dcf61c829d8b4a4d67"} Dec 03 05:57:53 crc kubenswrapper[4810]: I1203 05:57:53.034122 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" event={"ID":"ebca4920-1a0d-4bfa-bec0-0426c75f4447","Type":"ContainerStarted","Data":"70a56d96afa75be976e1163ee21cc2dfefae49708090beedce52d27b69da7983"} Dec 03 05:57:53 crc kubenswrapper[4810]: I1203 05:57:53.035433 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" event={"ID":"055bc13f-a18d-44ce-ab83-c0702e38f1d1","Type":"ContainerStarted","Data":"4a0133faac69c0df203c2f33518c21e9c011812da54558d164b24a432e4715a6"} Dec 03 05:57:53 crc kubenswrapper[4810]: I1203 05:57:53.515357 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 05:57:53 crc kubenswrapper[4810]: W1203 05:57:53.521848 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55cb8ef9_3722_41ab_8655_ccb1508619fd.slice/crio-2eb75d82e6241cfa11ef9bf36cf6b1306b01de7c577637c3adb4b0808f5069e5 WatchSource:0}: Error finding container 2eb75d82e6241cfa11ef9bf36cf6b1306b01de7c577637c3adb4b0808f5069e5: Status 404 returned error can't find the container with id 2eb75d82e6241cfa11ef9bf36cf6b1306b01de7c577637c3adb4b0808f5069e5 Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.051098 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"55cb8ef9-3722-41ab-8655-ccb1508619fd","Type":"ContainerStarted","Data":"2eb75d82e6241cfa11ef9bf36cf6b1306b01de7c577637c3adb4b0808f5069e5"} Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.069879 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.071580 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.077113 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-ztlng" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.077315 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.078053 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.085890 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.089003 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.094901 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.187138 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.187184 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0b5ee63-b0f3-4133-a294-69ed680c5374-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.187215 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b0b5ee63-b0f3-4133-a294-69ed680c5374-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.187247 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-config-data-default\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.187273 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6fvm\" (UniqueName: \"kubernetes.io/projected/b0b5ee63-b0f3-4133-a294-69ed680c5374-kube-api-access-s6fvm\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.187314 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-kolla-config\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.187344 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.187375 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0b5ee63-b0f3-4133-a294-69ed680c5374-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.292420 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-kolla-config\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.292484 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.292524 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0b5ee63-b0f3-4133-a294-69ed680c5374-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.292560 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.292576 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0b5ee63-b0f3-4133-a294-69ed680c5374-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.292611 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b0b5ee63-b0f3-4133-a294-69ed680c5374-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.292638 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-config-data-default\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.292664 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6fvm\" (UniqueName: \"kubernetes.io/projected/b0b5ee63-b0f3-4133-a294-69ed680c5374-kube-api-access-s6fvm\") pod \"openstack-galera-0\" (UID: 
\"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.293804 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-kolla-config\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.294545 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.294924 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-config-data-default\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.295207 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b0b5ee63-b0f3-4133-a294-69ed680c5374-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.304999 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0b5ee63-b0f3-4133-a294-69ed680c5374-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.309684 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0b5ee63-b0f3-4133-a294-69ed680c5374-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.316372 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0b5ee63-b0f3-4133-a294-69ed680c5374-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.331933 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6fvm\" (UniqueName: \"kubernetes.io/projected/b0b5ee63-b0f3-4133-a294-69ed680c5374-kube-api-access-s6fvm\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.338570 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b0b5ee63-b0f3-4133-a294-69ed680c5374\") " pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.410392 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 05:57:54 crc kubenswrapper[4810]: I1203 05:57:54.834036 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.061994 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b0b5ee63-b0f3-4133-a294-69ed680c5374","Type":"ContainerStarted","Data":"888237cd51cb89a4714d4793e694327b7fc6f44dc69f06e05788306018279761"} Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.404854 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.406775 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.410632 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.410861 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.411011 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-m8d2q" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.414375 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.429937 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.518970 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hf7k\" (UniqueName: \"kubernetes.io/projected/d8d3f680-ee84-4492-8d18-278d088e1332-kube-api-access-5hf7k\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.519073 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.519097 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8d3f680-ee84-4492-8d18-278d088e1332-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.519164 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.519186 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.519243 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d8d3f680-ee84-4492-8d18-278d088e1332-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.519357 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.519409 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8d3f680-ee84-4492-8d18-278d088e1332-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.614589 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.619102 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.621876 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.623091 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-rmtz2" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.625394 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627078 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627134 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8d3f680-ee84-4492-8d18-278d088e1332-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627191 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hf7k\" (UniqueName: \"kubernetes.io/projected/d8d3f680-ee84-4492-8d18-278d088e1332-kube-api-access-5hf7k\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627213 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627228 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8d3f680-ee84-4492-8d18-278d088e1332-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627272 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627304 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627334 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d8d3f680-ee84-4492-8d18-278d088e1332-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.627807 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d8d3f680-ee84-4492-8d18-278d088e1332-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.628864 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.629472 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.631216 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.640462 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d8d3f680-ee84-4492-8d18-278d088e1332-config-data-default\") pod 
\"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.641836 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8d3f680-ee84-4492-8d18-278d088e1332-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.642667 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8d3f680-ee84-4492-8d18-278d088e1332-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.658247 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.661009 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hf7k\" (UniqueName: \"kubernetes.io/projected/d8d3f680-ee84-4492-8d18-278d088e1332-kube-api-access-5hf7k\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.679142 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d8d3f680-ee84-4492-8d18-278d088e1332\") " pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.728619 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.728761 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-config-data\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.728842 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4bh4\" (UniqueName: \"kubernetes.io/projected/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-kube-api-access-c4bh4\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.728939 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.728999 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-kolla-config\") pod 
\"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.754497 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.835583 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-kolla-config\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.835659 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.835714 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-config-data\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.835759 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4bh4\" (UniqueName: \"kubernetes.io/projected/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-kube-api-access-c4bh4\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.835834 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.842544 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-kolla-config\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.842935 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-config-data\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.902500 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.905468 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:55 crc kubenswrapper[4810]: I1203 05:57:55.920515 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-c4bh4\" (UniqueName: \"kubernetes.io/projected/c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4-kube-api-access-c4bh4\") pod \"memcached-0\" (UID: \"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4\") " pod="openstack/memcached-0" Dec 03 05:57:56 crc kubenswrapper[4810]: I1203 05:57:56.050174 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 05:57:56 crc kubenswrapper[4810]: I1203 05:57:56.500715 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 05:57:56 crc kubenswrapper[4810]: I1203 05:57:56.623113 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 05:57:57 crc kubenswrapper[4810]: I1203 05:57:57.219254 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 05:57:57 crc kubenswrapper[4810]: I1203 05:57:57.228305 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 05:57:57 crc kubenswrapper[4810]: I1203 05:57:57.231461 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-t2t8w" Dec 03 05:57:57 crc kubenswrapper[4810]: I1203 05:57:57.250434 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 05:57:57 crc kubenswrapper[4810]: I1203 05:57:57.371034 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bbtw\" (UniqueName: \"kubernetes.io/projected/790a0398-c3e3-4070-9ce3-7ecbf8b2bcad-kube-api-access-7bbtw\") pod \"kube-state-metrics-0\" (UID: \"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad\") " pod="openstack/kube-state-metrics-0" Dec 03 05:57:57 crc kubenswrapper[4810]: I1203 05:57:57.472521 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bbtw\" (UniqueName: \"kubernetes.io/projected/790a0398-c3e3-4070-9ce3-7ecbf8b2bcad-kube-api-access-7bbtw\") pod \"kube-state-metrics-0\" (UID: \"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad\") " pod="openstack/kube-state-metrics-0" Dec 03 05:57:57 crc kubenswrapper[4810]: I1203 05:57:57.520035 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bbtw\" (UniqueName: \"kubernetes.io/projected/790a0398-c3e3-4070-9ce3-7ecbf8b2bcad-kube-api-access-7bbtw\") pod \"kube-state-metrics-0\" (UID: \"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad\") " pod="openstack/kube-state-metrics-0" Dec 03 05:57:57 crc kubenswrapper[4810]: I1203 05:57:57.565657 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.155148 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4x8tn"] Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.157239 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.159824 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.160084 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-87vq8" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.161477 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.185516 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4x8tn"] Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.197277 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-qd85b"] Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.201081 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.210956 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-qd85b"] Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.297647 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-lib\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.297699 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-run\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.297724 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-log\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.297773 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-etc-ovs\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.297794 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f8kw\" (UniqueName: \"kubernetes.io/projected/0df96f16-d193-4ecc-a624-e721c61a42af-kube-api-access-8f8kw\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.298025 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-log-ovn\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " 
pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.298048 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0df96f16-d193-4ecc-a624-e721c61a42af-scripts\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.298070 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df96f16-d193-4ecc-a624-e721c61a42af-combined-ca-bundle\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.298087 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-run-ovn\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.298110 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe7b8456-b2a9-44b7-b00b-320854a4c571-scripts\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.298134 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-run\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.298187 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nvc8\" (UniqueName: \"kubernetes.io/projected/fe7b8456-b2a9-44b7-b00b-320854a4c571-kube-api-access-5nvc8\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.298204 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0df96f16-d193-4ecc-a624-e721c61a42af-ovn-controller-tls-certs\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399470 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-log\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399544 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-etc-ovs\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc 
kubenswrapper[4810]: I1203 05:58:01.399578 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f8kw\" (UniqueName: \"kubernetes.io/projected/0df96f16-d193-4ecc-a624-e721c61a42af-kube-api-access-8f8kw\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399662 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-log-ovn\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399686 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0df96f16-d193-4ecc-a624-e721c61a42af-scripts\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399712 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df96f16-d193-4ecc-a624-e721c61a42af-combined-ca-bundle\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399752 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-run-ovn\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399785 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe7b8456-b2a9-44b7-b00b-320854a4c571-scripts\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399814 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-run\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399856 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nvc8\" (UniqueName: \"kubernetes.io/projected/fe7b8456-b2a9-44b7-b00b-320854a4c571-kube-api-access-5nvc8\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399877 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0df96f16-d193-4ecc-a624-e721c61a42af-ovn-controller-tls-certs\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399904 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: 
\"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-lib\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.399932 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-run\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.400628 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-run\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.400898 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-run\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.401021 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-log\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.401172 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-etc-ovs\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.401220 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/fe7b8456-b2a9-44b7-b00b-320854a4c571-var-lib\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.401298 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-log-ovn\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.401323 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0df96f16-d193-4ecc-a624-e721c61a42af-var-run-ovn\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.403648 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe7b8456-b2a9-44b7-b00b-320854a4c571-scripts\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.405327 4810 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0df96f16-d193-4ecc-a624-e721c61a42af-scripts\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.407057 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0df96f16-d193-4ecc-a624-e721c61a42af-ovn-controller-tls-certs\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.407818 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df96f16-d193-4ecc-a624-e721c61a42af-combined-ca-bundle\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.424340 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.425831 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.433243 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.433549 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.433780 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.434017 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.434201 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-tmq59" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.438129 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.472711 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nvc8\" (UniqueName: \"kubernetes.io/projected/fe7b8456-b2a9-44b7-b00b-320854a4c571-kube-api-access-5nvc8\") pod \"ovn-controller-ovs-qd85b\" (UID: \"fe7b8456-b2a9-44b7-b00b-320854a4c571\") " pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.478141 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f8kw\" (UniqueName: \"kubernetes.io/projected/0df96f16-d193-4ecc-a624-e721c61a42af-kube-api-access-8f8kw\") pod \"ovn-controller-4x8tn\" (UID: \"0df96f16-d193-4ecc-a624-e721c61a42af\") " pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.497088 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.501100 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/907b4534-7daf-4a4f-ae5b-65d58194cabf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.501172 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.501399 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.501453 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.501569 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.501612 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/907b4534-7daf-4a4f-ae5b-65d58194cabf-config\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.501665 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/907b4534-7daf-4a4f-ae5b-65d58194cabf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.501806 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64x67\" (UniqueName: \"kubernetes.io/projected/907b4534-7daf-4a4f-ae5b-65d58194cabf-kube-api-access-64x67\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.547856 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.603894 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64x67\" (UniqueName: \"kubernetes.io/projected/907b4534-7daf-4a4f-ae5b-65d58194cabf-kube-api-access-64x67\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.604048 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/907b4534-7daf-4a4f-ae5b-65d58194cabf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.604110 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.604199 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.604225 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.604297 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.604356 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/907b4534-7daf-4a4f-ae5b-65d58194cabf-config\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.604421 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/907b4534-7daf-4a4f-ae5b-65d58194cabf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.605091 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/907b4534-7daf-4a4f-ae5b-65d58194cabf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.605360 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.605365 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/907b4534-7daf-4a4f-ae5b-65d58194cabf-config\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.606319 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/907b4534-7daf-4a4f-ae5b-65d58194cabf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.610283 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.610597 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.615691 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/907b4534-7daf-4a4f-ae5b-65d58194cabf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.630505 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64x67\" (UniqueName: \"kubernetes.io/projected/907b4534-7daf-4a4f-ae5b-65d58194cabf-kube-api-access-64x67\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.648373 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"907b4534-7daf-4a4f-ae5b-65d58194cabf\") " pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:01 crc kubenswrapper[4810]: I1203 05:58:01.841572 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:03 crc kubenswrapper[4810]: W1203 05:58:03.260984 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4f7dfb9_9f84_4331_ad73_4b4ffa8120b4.slice/crio-f6db4216a6c186e04d093a717999a80720bc8bea7387f5f1fe6ccffd80148362 WatchSource:0}: Error finding container f6db4216a6c186e04d093a717999a80720bc8bea7387f5f1fe6ccffd80148362: Status 404 returned error can't find the container with id f6db4216a6c186e04d093a717999a80720bc8bea7387f5f1fe6ccffd80148362 Dec 03 05:58:03 crc kubenswrapper[4810]: W1203 05:58:03.262622 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8d3f680_ee84_4492_8d18_278d088e1332.slice/crio-a68c69794c39e56725b212d469726f3771295dfb0f6dfdeeaa7acfc40780c719 WatchSource:0}: Error finding container a68c69794c39e56725b212d469726f3771295dfb0f6dfdeeaa7acfc40780c719: Status 404 returned error can't find the container with id a68c69794c39e56725b212d469726f3771295dfb0f6dfdeeaa7acfc40780c719 Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.291030 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d8d3f680-ee84-4492-8d18-278d088e1332","Type":"ContainerStarted","Data":"a68c69794c39e56725b212d469726f3771295dfb0f6dfdeeaa7acfc40780c719"} Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.292418 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4","Type":"ContainerStarted","Data":"f6db4216a6c186e04d093a717999a80720bc8bea7387f5f1fe6ccffd80148362"} Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.802599 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.804430 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.808327 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.811601 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-mqfxv" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.811972 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.812192 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.812246 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.893182 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.893301 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.893353 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b03355df-e435-4db6-8f0a-10a6618f4bfa-config\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.893702 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.893781 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b03355df-e435-4db6-8f0a-10a6618f4bfa-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.893950 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.894062 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b03355df-e435-4db6-8f0a-10a6618f4bfa-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " 
pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.894123 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h47zb\" (UniqueName: \"kubernetes.io/projected/b03355df-e435-4db6-8f0a-10a6618f4bfa-kube-api-access-h47zb\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.996180 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.996336 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.996390 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b03355df-e435-4db6-8f0a-10a6618f4bfa-config\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.996528 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.996588 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b03355df-e435-4db6-8f0a-10a6618f4bfa-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.996631 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.996667 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b03355df-e435-4db6-8f0a-10a6618f4bfa-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.996705 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h47zb\" (UniqueName: \"kubernetes.io/projected/b03355df-e435-4db6-8f0a-10a6618f4bfa-kube-api-access-h47zb\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:04 crc kubenswrapper[4810]: I1203 05:58:04.998016 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.000125 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b03355df-e435-4db6-8f0a-10a6618f4bfa-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.000621 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b03355df-e435-4db6-8f0a-10a6618f4bfa-config\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.000773 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b03355df-e435-4db6-8f0a-10a6618f4bfa-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.007898 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.009371 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.012503 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b03355df-e435-4db6-8f0a-10a6618f4bfa-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.014407 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h47zb\" (UniqueName: \"kubernetes.io/projected/b03355df-e435-4db6-8f0a-10a6618f4bfa-kube-api-access-h47zb\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.036587 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b03355df-e435-4db6-8f0a-10a6618f4bfa\") " pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:05 crc kubenswrapper[4810]: I1203 05:58:05.141455 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:09 crc kubenswrapper[4810]: I1203 05:58:09.994499 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6psd9"] Dec 03 05:58:09 crc kubenswrapper[4810]: I1203 05:58:09.997234 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.009678 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6psd9"] Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.085904 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-utilities\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.085969 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj8v4\" (UniqueName: \"kubernetes.io/projected/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-kube-api-access-sj8v4\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.086001 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-catalog-content\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.187762 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-utilities\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.188081 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj8v4\" (UniqueName: \"kubernetes.io/projected/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-kube-api-access-sj8v4\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.188197 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-catalog-content\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.188291 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-utilities\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.188724 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-catalog-content\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.207905 4810 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-sj8v4\" (UniqueName: \"kubernetes.io/projected/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-kube-api-access-sj8v4\") pod \"redhat-marketplace-6psd9\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:10 crc kubenswrapper[4810]: I1203 05:58:10.324919 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:12 crc kubenswrapper[4810]: I1203 05:58:12.233130 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4x8tn"] Dec 03 05:58:17 crc kubenswrapper[4810]: E1203 05:58:17.402274 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-rabbitmq:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:58:17 crc kubenswrapper[4810]: E1203 05:58:17.402621 4810 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-rabbitmq:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:58:17 crc kubenswrapper[4810]: E1203 05:58:17.402794 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.rdoproject.org/podified-master-centos9/openstack-rabbitmq:2e38c527ddf6e767040136ecf014e7b9,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fzfvt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(43dbe69c-d6b8-4773-ad88-79c3c975afdf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:58:17 crc kubenswrapper[4810]: E1203 05:58:17.404075 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" Dec 03 05:58:17 crc kubenswrapper[4810]: E1203 05:58:17.451370 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-rabbitmq:2e38c527ddf6e767040136ecf014e7b9\\\"\"" pod="openstack/rabbitmq-server-0" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" Dec 03 05:58:17 crc kubenswrapper[4810]: I1203 05:58:17.997270 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-qd85b"] Dec 03 05:58:19 crc kubenswrapper[4810]: E1203 05:58:19.523010 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-mariadb:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:58:19 crc kubenswrapper[4810]: E1203 05:58:19.523425 4810 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-mariadb:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:58:19 crc kubenswrapper[4810]: E1203 05:58:19.523563 4810 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-master-centos9/openstack-mariadb:2e38c527ddf6e767040136ecf014e7b9,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s6fvm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(b0b5ee63-b0f3-4133-a294-69ed680c5374): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:58:19 crc kubenswrapper[4810]: E1203 05:58:19.524786 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="b0b5ee63-b0f3-4133-a294-69ed680c5374" Dec 03 05:58:20 crc kubenswrapper[4810]: W1203 05:58:20.137104 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe7b8456_b2a9_44b7_b00b_320854a4c571.slice/crio-d7137c3421449d181deb663a6117c214269b01bcea69e67cfdc1eeab3b840173 WatchSource:0}: Error finding container d7137c3421449d181deb663a6117c214269b01bcea69e67cfdc1eeab3b840173: Status 404 returned error can't find the container with id d7137c3421449d181deb663a6117c214269b01bcea69e67cfdc1eeab3b840173 Dec 03 05:58:20 crc kubenswrapper[4810]: E1203 05:58:20.152724 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:58:20 crc kubenswrapper[4810]: 
E1203 05:58:20.152815 4810 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:58:20 crc kubenswrapper[4810]: E1203 05:58:20.153083 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:2e38c527ddf6e767040136ecf014e7b9,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v825t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-94b4f9f45-d8mcn_openstack(88f058b3-65fc-468b-b177-b267661f7a8f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:58:20 crc kubenswrapper[4810]: E1203 05:58:20.154297 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" podUID="88f058b3-65fc-468b-b177-b267661f7a8f" Dec 03 05:58:20 crc kubenswrapper[4810]: E1203 05:58:20.173874 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:58:20 crc kubenswrapper[4810]: E1203 05:58:20.173938 4810 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:58:20 crc kubenswrapper[4810]: E1203 05:58:20.174941 
4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:2e38c527ddf6e767040136ecf014e7b9,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-56sbs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6947456757-jld82_openstack(622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:58:20 crc kubenswrapper[4810]: E1203 05:58:20.176202 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-6947456757-jld82" podUID="622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5" Dec 03 05:58:20 crc kubenswrapper[4810]: I1203 05:58:20.464320 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qd85b" event={"ID":"fe7b8456-b2a9-44b7-b00b-320854a4c571","Type":"ContainerStarted","Data":"d7137c3421449d181deb663a6117c214269b01bcea69e67cfdc1eeab3b840173"} Dec 03 05:58:20 crc kubenswrapper[4810]: I1203 05:58:20.466497 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4x8tn" event={"ID":"0df96f16-d193-4ecc-a624-e721c61a42af","Type":"ContainerStarted","Data":"ba2df325f087b3b808865b8b5a66899e8fe8d098da21313faa4595e959a73445"} Dec 03 05:58:20 crc kubenswrapper[4810]: E1203 05:58:20.468631 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-mariadb:2e38c527ddf6e767040136ecf014e7b9\\\"\"" pod="openstack/openstack-galera-0" podUID="b0b5ee63-b0f3-4133-a294-69ed680c5374" Dec 03 05:58:20 crc kubenswrapper[4810]: I1203 05:58:20.564276 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 05:58:21 crc kubenswrapper[4810]: W1203 05:58:21.145830 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod790a0398_c3e3_4070_9ce3_7ecbf8b2bcad.slice/crio-99e842efd559639cd582dc6f1f737a95cdf0556be38427e1dcdd82f2f583592b WatchSource:0}: Error finding container 99e842efd559639cd582dc6f1f737a95cdf0556be38427e1dcdd82f2f583592b: Status 404 returned error can't find the container with id 99e842efd559639cd582dc6f1f737a95cdf0556be38427e1dcdd82f2f583592b Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.290060 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.328819 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.403612 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-dns-svc\") pod \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.403822 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56sbs\" (UniqueName: \"kubernetes.io/projected/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-kube-api-access-56sbs\") pod \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.403888 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-config\") pod \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\" (UID: \"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5\") " Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.404477 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88f058b3-65fc-468b-b177-b267661f7a8f-config\") pod \"88f058b3-65fc-468b-b177-b267661f7a8f\" (UID: \"88f058b3-65fc-468b-b177-b267661f7a8f\") " Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.404503 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v825t\" (UniqueName: \"kubernetes.io/projected/88f058b3-65fc-468b-b177-b267661f7a8f-kube-api-access-v825t\") pod \"88f058b3-65fc-468b-b177-b267661f7a8f\" (UID: \"88f058b3-65fc-468b-b177-b267661f7a8f\") " Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.406099 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-config" (OuterVolumeSpecName: "config") pod "622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5" (UID: "622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.406931 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88f058b3-65fc-468b-b177-b267661f7a8f-config" (OuterVolumeSpecName: "config") pod "88f058b3-65fc-468b-b177-b267661f7a8f" (UID: "88f058b3-65fc-468b-b177-b267661f7a8f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.407111 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5" (UID: "622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.408987 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88f058b3-65fc-468b-b177-b267661f7a8f-kube-api-access-v825t" (OuterVolumeSpecName: "kube-api-access-v825t") pod "88f058b3-65fc-468b-b177-b267661f7a8f" (UID: "88f058b3-65fc-468b-b177-b267661f7a8f"). InnerVolumeSpecName "kube-api-access-v825t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.410064 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-kube-api-access-56sbs" (OuterVolumeSpecName: "kube-api-access-56sbs") pod "622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5" (UID: "622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5"). InnerVolumeSpecName "kube-api-access-56sbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.479179 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad","Type":"ContainerStarted","Data":"99e842efd559639cd582dc6f1f737a95cdf0556be38427e1dcdd82f2f583592b"} Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.482156 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" event={"ID":"88f058b3-65fc-468b-b177-b267661f7a8f","Type":"ContainerDied","Data":"5b1a09032782feb8768e5b71ceffc5ca4453bbb462bfe4f3447411287004aa64"} Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.482261 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-94b4f9f45-d8mcn" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.486603 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6947456757-jld82" event={"ID":"622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5","Type":"ContainerDied","Data":"c6b3dce030acea22f98bd4a9a8652625be10b200687264239d9947bbfe7b2d0a"} Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.486658 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6947456757-jld82" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.518411 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56sbs\" (UniqueName: \"kubernetes.io/projected/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-kube-api-access-56sbs\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.518435 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.518445 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88f058b3-65fc-468b-b177-b267661f7a8f-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.518456 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v825t\" (UniqueName: \"kubernetes.io/projected/88f058b3-65fc-468b-b177-b267661f7a8f-kube-api-access-v825t\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.518482 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.559019 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 05:58:21 crc kubenswrapper[4810]: W1203 05:58:21.643956 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb03355df_e435_4db6_8f0a_10a6618f4bfa.slice/crio-1f7e11884564e36411f41fbcd88daa3d7b71c898c21c3e72e8e72fbe9f784153 WatchSource:0}: Error finding container 1f7e11884564e36411f41fbcd88daa3d7b71c898c21c3e72e8e72fbe9f784153: Status 404 returned error can't find the container with id 1f7e11884564e36411f41fbcd88daa3d7b71c898c21c3e72e8e72fbe9f784153 Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.695669 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6947456757-jld82"] Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.702683 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.715430 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6947456757-jld82"] Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.725621 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-94b4f9f45-d8mcn"] Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.733144 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-94b4f9f45-d8mcn"] Dec 03 05:58:21 crc kubenswrapper[4810]: I1203 05:58:21.752548 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6psd9"] Dec 03 05:58:21 crc kubenswrapper[4810]: W1203 05:58:21.786146 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod907b4534_7daf_4a4f_ae5b_65d58194cabf.slice/crio-d3d3f366ea7a23ead8df26cb6191e5340e61e5a0052c17fa4992b5e1caae381e WatchSource:0}: Error finding container d3d3f366ea7a23ead8df26cb6191e5340e61e5a0052c17fa4992b5e1caae381e: Status 404 returned error can't find the container with id 
d3d3f366ea7a23ead8df26cb6191e5340e61e5a0052c17fa4992b5e1caae381e Dec 03 05:58:21 crc kubenswrapper[4810]: W1203 05:58:21.788693 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0a3af64_f45a_4cfe_b6d8_0e81b0f4be38.slice/crio-63be1b10591b2fcfb16d58833d73da0554cfd7caff7fcacbb7cfe0c788f1614e WatchSource:0}: Error finding container 63be1b10591b2fcfb16d58833d73da0554cfd7caff7fcacbb7cfe0c788f1614e: Status 404 returned error can't find the container with id 63be1b10591b2fcfb16d58833d73da0554cfd7caff7fcacbb7cfe0c788f1614e Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.396688 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5" path="/var/lib/kubelet/pods/622a0d3d-aa6c-409a-9b64-1ab8ffa3cca5/volumes" Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.397150 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88f058b3-65fc-468b-b177-b267661f7a8f" path="/var/lib/kubelet/pods/88f058b3-65fc-468b-b177-b267661f7a8f/volumes" Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.496484 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b03355df-e435-4db6-8f0a-10a6618f4bfa","Type":"ContainerStarted","Data":"1f7e11884564e36411f41fbcd88daa3d7b71c898c21c3e72e8e72fbe9f784153"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.498360 4810 generic.go:334] "Generic (PLEG): container finished" podID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" containerID="5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f" exitCode=0 Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.498459 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" event={"ID":"ebca4920-1a0d-4bfa-bec0-0426c75f4447","Type":"ContainerDied","Data":"5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.500954 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d8d3f680-ee84-4492-8d18-278d088e1332","Type":"ContainerStarted","Data":"7799a72afa84f6ac0128d61f80cf11f598014191c091119b1f454fdd8b4d0791"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.503933 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"55cb8ef9-3722-41ab-8655-ccb1508619fd","Type":"ContainerStarted","Data":"af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.507001 4810 generic.go:334] "Generic (PLEG): container finished" podID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" containerID="4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee" exitCode=0 Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.507096 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" event={"ID":"055bc13f-a18d-44ce-ab83-c0702e38f1d1","Type":"ContainerDied","Data":"4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.508372 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"907b4534-7daf-4a4f-ae5b-65d58194cabf","Type":"ContainerStarted","Data":"d3d3f366ea7a23ead8df26cb6191e5340e61e5a0052c17fa4992b5e1caae381e"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.510951 4810 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4","Type":"ContainerStarted","Data":"22b2cad808a12cd44632ffaca5eebf82cdcfa0dd38d905d5d063c3e72145e13d"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.511056 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.513455 4810 generic.go:334] "Generic (PLEG): container finished" podID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerID="7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e" exitCode=0 Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.513495 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6psd9" event={"ID":"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38","Type":"ContainerDied","Data":"7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.513517 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6psd9" event={"ID":"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38","Type":"ContainerStarted","Data":"63be1b10591b2fcfb16d58833d73da0554cfd7caff7fcacbb7cfe0c788f1614e"} Dec 03 05:58:22 crc kubenswrapper[4810]: I1203 05:58:22.623159 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=9.660550426 podStartE2EDuration="27.623132275s" podCreationTimestamp="2025-12-03 05:57:55 +0000 UTC" firstStartedPulling="2025-12-03 05:58:03.271024926 +0000 UTC m=+1007.206485767" lastFinishedPulling="2025-12-03 05:58:21.233606775 +0000 UTC m=+1025.169067616" observedRunningTime="2025-12-03 05:58:22.622033336 +0000 UTC m=+1026.557494177" watchObservedRunningTime="2025-12-03 05:58:22.623132275 +0000 UTC m=+1026.558593116" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.468214 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-hthr4"] Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.471691 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.474565 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.482687 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-hthr4"] Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.577066 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/366d9c09-ff45-486b-957f-abeba4ccfda0-ovs-rundir\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.577112 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/366d9c09-ff45-486b-957f-abeba4ccfda0-combined-ca-bundle\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.577147 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/366d9c09-ff45-486b-957f-abeba4ccfda0-config\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.577180 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/366d9c09-ff45-486b-957f-abeba4ccfda0-ovn-rundir\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.577196 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/366d9c09-ff45-486b-957f-abeba4ccfda0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.577246 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr7mj\" (UniqueName: \"kubernetes.io/projected/366d9c09-ff45-486b-957f-abeba4ccfda0-kube-api-access-sr7mj\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.602718 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55dc666865-wxjgh"] Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.634347 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d57cc48bc-sng7j"] Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.636007 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.638979 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.647750 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d57cc48bc-sng7j"] Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678627 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-ovsdbserver-nb\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678687 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/366d9c09-ff45-486b-957f-abeba4ccfda0-combined-ca-bundle\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678711 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/366d9c09-ff45-486b-957f-abeba4ccfda0-ovs-rundir\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678757 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/366d9c09-ff45-486b-957f-abeba4ccfda0-config\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678796 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/366d9c09-ff45-486b-957f-abeba4ccfda0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678813 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/366d9c09-ff45-486b-957f-abeba4ccfda0-ovn-rundir\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678832 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwfrz\" (UniqueName: \"kubernetes.io/projected/be351a50-d782-4c52-98e1-b66372d2b096-kube-api-access-mwfrz\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678852 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-dns-svc\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 
05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678881 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr7mj\" (UniqueName: \"kubernetes.io/projected/366d9c09-ff45-486b-957f-abeba4ccfda0-kube-api-access-sr7mj\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.678930 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-config\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.679847 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/366d9c09-ff45-486b-957f-abeba4ccfda0-ovn-rundir\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.679856 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/366d9c09-ff45-486b-957f-abeba4ccfda0-ovs-rundir\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.680516 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/366d9c09-ff45-486b-957f-abeba4ccfda0-config\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.686376 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/366d9c09-ff45-486b-957f-abeba4ccfda0-combined-ca-bundle\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.695573 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/366d9c09-ff45-486b-957f-abeba4ccfda0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.704418 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr7mj\" (UniqueName: \"kubernetes.io/projected/366d9c09-ff45-486b-957f-abeba4ccfda0-kube-api-access-sr7mj\") pod \"ovn-controller-metrics-hthr4\" (UID: \"366d9c09-ff45-486b-957f-abeba4ccfda0\") " pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.780866 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-ovsdbserver-nb\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.780952 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mwfrz\" (UniqueName: \"kubernetes.io/projected/be351a50-d782-4c52-98e1-b66372d2b096-kube-api-access-mwfrz\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.780980 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-dns-svc\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.781039 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-config\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.782895 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-ovsdbserver-nb\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.783141 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-dns-svc\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.783945 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-config\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.804888 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-hthr4" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.809567 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwfrz\" (UniqueName: \"kubernetes.io/projected/be351a50-d782-4c52-98e1-b66372d2b096-kube-api-access-mwfrz\") pod \"dnsmasq-dns-d57cc48bc-sng7j\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.812240 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d9886d5bf-jzz6t"] Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.821260 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65459b959f-kk7hv"] Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.824241 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.840313 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.844165 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65459b959f-kk7hv"] Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.884802 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-dns-svc\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.884866 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-sb\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.884897 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4vx6\" (UniqueName: \"kubernetes.io/projected/b644c5e4-4098-4fbf-8907-00a35976cb46-kube-api-access-s4vx6\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.884931 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-nb\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.884949 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-config\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.957961 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.986594 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-dns-svc\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.986649 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-sb\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.986676 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4vx6\" (UniqueName: \"kubernetes.io/projected/b644c5e4-4098-4fbf-8907-00a35976cb46-kube-api-access-s4vx6\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.986698 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-nb\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.986712 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-config\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.987646 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-dns-svc\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.987686 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-sb\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.987822 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-config\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:24 crc kubenswrapper[4810]: I1203 05:58:24.987978 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-nb\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:25 crc kubenswrapper[4810]: I1203 05:58:25.003229 
4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4vx6\" (UniqueName: \"kubernetes.io/projected/b644c5e4-4098-4fbf-8907-00a35976cb46-kube-api-access-s4vx6\") pod \"dnsmasq-dns-65459b959f-kk7hv\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:25 crc kubenswrapper[4810]: I1203 05:58:25.158722 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:25 crc kubenswrapper[4810]: I1203 05:58:25.543683 4810 generic.go:334] "Generic (PLEG): container finished" podID="d8d3f680-ee84-4492-8d18-278d088e1332" containerID="7799a72afa84f6ac0128d61f80cf11f598014191c091119b1f454fdd8b4d0791" exitCode=0 Dec 03 05:58:25 crc kubenswrapper[4810]: I1203 05:58:25.543771 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d8d3f680-ee84-4492-8d18-278d088e1332","Type":"ContainerDied","Data":"7799a72afa84f6ac0128d61f80cf11f598014191c091119b1f454fdd8b4d0791"} Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.055173 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.505161 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d57cc48bc-sng7j"] Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.525002 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-hthr4"] Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.573517 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" event={"ID":"ebca4920-1a0d-4bfa-bec0-0426c75f4447","Type":"ContainerStarted","Data":"d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42"} Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.573753 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" podUID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" containerName="dnsmasq-dns" containerID="cri-o://d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42" gracePeriod=10 Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.573883 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.600756 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" podStartSLOduration=7.328580352 podStartE2EDuration="36.600698023s" podCreationTimestamp="2025-12-03 05:57:50 +0000 UTC" firstStartedPulling="2025-12-03 05:57:52.010284211 +0000 UTC m=+995.945745052" lastFinishedPulling="2025-12-03 05:58:21.282401882 +0000 UTC m=+1025.217862723" observedRunningTime="2025-12-03 05:58:26.594702695 +0000 UTC m=+1030.530163536" watchObservedRunningTime="2025-12-03 05:58:26.600698023 +0000 UTC m=+1030.536158874" Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.828575 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65459b959f-kk7hv"] Dec 03 05:58:26 crc kubenswrapper[4810]: I1203 05:58:26.992376 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.027119 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgqkb\" (UniqueName: \"kubernetes.io/projected/ebca4920-1a0d-4bfa-bec0-0426c75f4447-kube-api-access-fgqkb\") pod \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.027179 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-config\") pod \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.027243 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-dns-svc\") pod \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\" (UID: \"ebca4920-1a0d-4bfa-bec0-0426c75f4447\") " Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.033590 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebca4920-1a0d-4bfa-bec0-0426c75f4447-kube-api-access-fgqkb" (OuterVolumeSpecName: "kube-api-access-fgqkb") pod "ebca4920-1a0d-4bfa-bec0-0426c75f4447" (UID: "ebca4920-1a0d-4bfa-bec0-0426c75f4447"). InnerVolumeSpecName "kube-api-access-fgqkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.075670 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-config" (OuterVolumeSpecName: "config") pod "ebca4920-1a0d-4bfa-bec0-0426c75f4447" (UID: "ebca4920-1a0d-4bfa-bec0-0426c75f4447"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.091027 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ebca4920-1a0d-4bfa-bec0-0426c75f4447" (UID: "ebca4920-1a0d-4bfa-bec0-0426c75f4447"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.128737 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgqkb\" (UniqueName: \"kubernetes.io/projected/ebca4920-1a0d-4bfa-bec0-0426c75f4447-kube-api-access-fgqkb\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.128782 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.128792 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ebca4920-1a0d-4bfa-bec0-0426c75f4447-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.560433 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65459b959f-kk7hv"] Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.630389 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b8d9f7b57-c7z68"] Dec 03 05:58:27 crc kubenswrapper[4810]: E1203 05:58:27.631325 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" containerName="dnsmasq-dns" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.631340 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" containerName="dnsmasq-dns" Dec 03 05:58:27 crc kubenswrapper[4810]: E1203 05:58:27.631377 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" containerName="init" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.631386 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" containerName="init" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.632168 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" containerName="dnsmasq-dns" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.638542 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.713840 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b8d9f7b57-c7z68"] Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.730146 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4x8tn" event={"ID":"0df96f16-d193-4ecc-a624-e721c61a42af","Type":"ContainerStarted","Data":"58a17659e06843614074993db640a7fd9e30d3cfd6b8a3d50eff4432ff98f48f"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.737334 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-4x8tn" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.761218 4810 generic.go:334] "Generic (PLEG): container finished" podID="fe7b8456-b2a9-44b7-b00b-320854a4c571" containerID="bffb6bb2500587828b862557bfe2baa64ee01a865f2ab59d119349e6fd8c3c95" exitCode=0 Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.761330 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qd85b" event={"ID":"fe7b8456-b2a9-44b7-b00b-320854a4c571","Type":"ContainerDied","Data":"bffb6bb2500587828b862557bfe2baa64ee01a865f2ab59d119349e6fd8c3c95"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.763066 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-config\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.763132 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4g7h\" (UniqueName: \"kubernetes.io/projected/27ca79d5-c933-4b20-9f26-9885671647bf-kube-api-access-z4g7h\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.763299 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-nb\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.763355 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-dns-svc\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.763377 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-sb\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.770438 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4x8tn" podStartSLOduration=20.071243582 podStartE2EDuration="26.770423596s" 
podCreationTimestamp="2025-12-03 05:58:01 +0000 UTC" firstStartedPulling="2025-12-03 05:58:19.488391594 +0000 UTC m=+1023.423852435" lastFinishedPulling="2025-12-03 05:58:26.187571608 +0000 UTC m=+1030.123032449" observedRunningTime="2025-12-03 05:58:27.766470631 +0000 UTC m=+1031.701931472" watchObservedRunningTime="2025-12-03 05:58:27.770423596 +0000 UTC m=+1031.705884437" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.780316 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" event={"ID":"055bc13f-a18d-44ce-ab83-c0702e38f1d1","Type":"ContainerStarted","Data":"2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.780425 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" podUID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" containerName="dnsmasq-dns" containerID="cri-o://2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa" gracePeriod=10 Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.780449 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.801083 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad","Type":"ContainerStarted","Data":"26135b25a697df12edb2fac87680236617396581660551e65868d5ae03578c46"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.801478 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.805581 4810 generic.go:334] "Generic (PLEG): container finished" podID="be351a50-d782-4c52-98e1-b66372d2b096" containerID="843177296cbaf04307cd93a834d794ce436524f7ca60bf4f3319789a7153f5bb" exitCode=0 Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.805655 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" event={"ID":"be351a50-d782-4c52-98e1-b66372d2b096","Type":"ContainerDied","Data":"843177296cbaf04307cd93a834d794ce436524f7ca60bf4f3319789a7153f5bb"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.805689 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" event={"ID":"be351a50-d782-4c52-98e1-b66372d2b096","Type":"ContainerStarted","Data":"2a139ca149186e6ff3d80d18455356dd673daa4cec1cfe2e3a2b904fb443a260"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.812178 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-hthr4" event={"ID":"366d9c09-ff45-486b-957f-abeba4ccfda0","Type":"ContainerStarted","Data":"85b1c8681b11c4f25af7ea18304bceccba622ae58794e2b3b134ec3f4f8474a4"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.829041 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b03355df-e435-4db6-8f0a-10a6618f4bfa","Type":"ContainerStarted","Data":"0983e1946348327938a2a2f057107ba0672c792afc2b635f2ea4e96ed8aab886"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.837155 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" podStartSLOduration=7.715086662 podStartE2EDuration="36.837128685s" podCreationTimestamp="2025-12-03 05:57:51 +0000 UTC" firstStartedPulling="2025-12-03 
05:57:52.220225068 +0000 UTC m=+996.155685909" lastFinishedPulling="2025-12-03 05:58:21.342267091 +0000 UTC m=+1025.277727932" observedRunningTime="2025-12-03 05:58:27.824146733 +0000 UTC m=+1031.759607574" watchObservedRunningTime="2025-12-03 05:58:27.837128685 +0000 UTC m=+1031.772589526" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.858137 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d8d3f680-ee84-4492-8d18-278d088e1332","Type":"ContainerStarted","Data":"f1ee3359ee9eeba7956806923703966b5c323b7af8b6a9fd4f0c9fd017207e88"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.861507 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=25.812767815 podStartE2EDuration="30.861486627s" podCreationTimestamp="2025-12-03 05:57:57 +0000 UTC" firstStartedPulling="2025-12-03 05:58:21.173435458 +0000 UTC m=+1025.108896319" lastFinishedPulling="2025-12-03 05:58:26.22215429 +0000 UTC m=+1030.157615131" observedRunningTime="2025-12-03 05:58:27.843972675 +0000 UTC m=+1031.779433516" watchObservedRunningTime="2025-12-03 05:58:27.861486627 +0000 UTC m=+1031.796947468" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.864825 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-nb\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.864890 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-dns-svc\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.864909 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-sb\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.864934 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-config\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.864950 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4g7h\" (UniqueName: \"kubernetes.io/projected/27ca79d5-c933-4b20-9f26-9885671647bf-kube-api-access-z4g7h\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.866302 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"907b4534-7daf-4a4f-ae5b-65d58194cabf","Type":"ContainerStarted","Data":"cbebd822881629c73d153c6e4c25da544fdb0914b70346a0f1672e368be4c3df"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.868659 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-nb\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.869258 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-sb\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.870461 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-config\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.870792 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-dns-svc\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.885255 4810 generic.go:334] "Generic (PLEG): container finished" podID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerID="756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39" exitCode=0 Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.885372 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6psd9" event={"ID":"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38","Type":"ContainerDied","Data":"756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.889012 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=15.817597714 podStartE2EDuration="33.888968012s" podCreationTimestamp="2025-12-03 05:57:54 +0000 UTC" firstStartedPulling="2025-12-03 05:58:03.270970535 +0000 UTC m=+1007.206431376" lastFinishedPulling="2025-12-03 05:58:21.342340833 +0000 UTC m=+1025.277801674" observedRunningTime="2025-12-03 05:58:27.886637701 +0000 UTC m=+1031.822098542" watchObservedRunningTime="2025-12-03 05:58:27.888968012 +0000 UTC m=+1031.824428843" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.892085 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4g7h\" (UniqueName: \"kubernetes.io/projected/27ca79d5-c933-4b20-9f26-9885671647bf-kube-api-access-z4g7h\") pod \"dnsmasq-dns-6b8d9f7b57-c7z68\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.893624 4810 generic.go:334] "Generic (PLEG): container finished" podID="b644c5e4-4098-4fbf-8907-00a35976cb46" containerID="82dc8dd5ae9e2e7984731b6603f828b2033fcea8c418f8776928b562617f76e9" exitCode=0 Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.893717 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65459b959f-kk7hv" 
event={"ID":"b644c5e4-4098-4fbf-8907-00a35976cb46","Type":"ContainerDied","Data":"82dc8dd5ae9e2e7984731b6603f828b2033fcea8c418f8776928b562617f76e9"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.893812 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65459b959f-kk7hv" event={"ID":"b644c5e4-4098-4fbf-8907-00a35976cb46","Type":"ContainerStarted","Data":"d66a47b4bfb7a0abcacb8db888dfc2224d256e952d3fb4dbaf3372cd4c30c124"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.921058 4810 generic.go:334] "Generic (PLEG): container finished" podID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" containerID="d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42" exitCode=0 Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.921599 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" event={"ID":"ebca4920-1a0d-4bfa-bec0-0426c75f4447","Type":"ContainerDied","Data":"d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.921636 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" event={"ID":"ebca4920-1a0d-4bfa-bec0-0426c75f4447","Type":"ContainerDied","Data":"70a56d96afa75be976e1163ee21cc2dfefae49708090beedce52d27b69da7983"} Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.921667 4810 scope.go:117] "RemoveContainer" containerID="d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.921819 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55dc666865-wxjgh" Dec 03 05:58:27 crc kubenswrapper[4810]: I1203 05:58:27.989363 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.044131 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55dc666865-wxjgh"] Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.052344 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55dc666865-wxjgh"] Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.185924 4810 scope.go:117] "RemoveContainer" containerID="5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.265915 4810 scope.go:117] "RemoveContainer" containerID="d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42" Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.267475 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42\": container with ID starting with d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42 not found: ID does not exist" containerID="d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.267533 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42"} err="failed to get container status \"d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42\": rpc error: code = NotFound desc = could not find container \"d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42\": container with ID starting with d76b6a9bf20a34af50f502dd7deccf4cdbb0e160d43ef750d47c5783e4ac3c42 not found: ID does not exist" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.267570 4810 scope.go:117] "RemoveContainer" containerID="5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f" Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.268034 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f\": container with ID starting with 5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f not found: ID does not exist" containerID="5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.268055 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f"} err="failed to get container status \"5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f\": rpc error: code = NotFound desc = could not find container \"5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f\": container with ID starting with 5b8d92963c5776a776b863399bff36a6874c5841243b18f5751ef8ee2d786c9f not found: ID does not exist" Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.430921 4810 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 03 05:58:28 crc kubenswrapper[4810]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/b644c5e4-4098-4fbf-8907-00a35976cb46/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 05:58:28 crc kubenswrapper[4810]: > 
podSandboxID="d66a47b4bfb7a0abcacb8db888dfc2224d256e952d3fb4dbaf3372cd4c30c124" Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.431098 4810 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 03 05:58:28 crc kubenswrapper[4810]: container &Container{Name:dnsmasq-dns,Image:quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:2e38c527ddf6e767040136ecf014e7b9,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n654h99h64ch5dbh6dh555h587h64bh5cfh647h5fdh57ch679h9h597h5f5hbch59bh54fh575h566h667h586h5f5h65ch5bch57h68h65ch58bh694h5cfq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s4vx6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-65459b959f-kk7hv_openstack(b644c5e4-4098-4fbf-8907-00a35976cb46): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/b644c5e4-4098-4fbf-8907-00a35976cb46/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 05:58:28 crc kubenswrapper[4810]: > 
logger="UnhandledError" Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.433056 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/b644c5e4-4098-4fbf-8907-00a35976cb46/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-65459b959f-kk7hv" podUID="b644c5e4-4098-4fbf-8907-00a35976cb46" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.442507 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebca4920-1a0d-4bfa-bec0-0426c75f4447" path="/var/lib/kubelet/pods/ebca4920-1a0d-4bfa-bec0-0426c75f4447/volumes" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.483980 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.579490 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-config\") pod \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.579952 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxc2f\" (UniqueName: \"kubernetes.io/projected/055bc13f-a18d-44ce-ab83-c0702e38f1d1-kube-api-access-wxc2f\") pod \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.580238 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-dns-svc\") pod \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\" (UID: \"055bc13f-a18d-44ce-ab83-c0702e38f1d1\") " Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.591101 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/055bc13f-a18d-44ce-ab83-c0702e38f1d1-kube-api-access-wxc2f" (OuterVolumeSpecName: "kube-api-access-wxc2f") pod "055bc13f-a18d-44ce-ab83-c0702e38f1d1" (UID: "055bc13f-a18d-44ce-ab83-c0702e38f1d1"). InnerVolumeSpecName "kube-api-access-wxc2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.637250 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "055bc13f-a18d-44ce-ab83-c0702e38f1d1" (UID: "055bc13f-a18d-44ce-ab83-c0702e38f1d1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.641949 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-config" (OuterVolumeSpecName: "config") pod "055bc13f-a18d-44ce-ab83-c0702e38f1d1" (UID: "055bc13f-a18d-44ce-ab83-c0702e38f1d1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.679185 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b8d9f7b57-c7z68"] Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.683734 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.683776 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/055bc13f-a18d-44ce-ab83-c0702e38f1d1-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.683786 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxc2f\" (UniqueName: \"kubernetes.io/projected/055bc13f-a18d-44ce-ab83-c0702e38f1d1-kube-api-access-wxc2f\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.698173 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.698858 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" containerName="dnsmasq-dns" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.698876 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" containerName="dnsmasq-dns" Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.698885 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" containerName="init" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.698892 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" containerName="init" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.699074 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" containerName="dnsmasq-dns" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.707080 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.713211 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.719365 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.719694 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-wwmd6" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.720012 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.724336 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.786631 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.787064 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-cache\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.787099 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.787167 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jbgt\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-kube-api-access-7jbgt\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.787217 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-lock\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.895780 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-cache\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.895847 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.895917 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-7jbgt\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-kube-api-access-7jbgt\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.895978 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-lock\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.896023 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.896199 4810 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.896214 4810 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 05:58:28 crc kubenswrapper[4810]: E1203 05:58:28.896274 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift podName:229a32b8-4f61-4370-afc5-a5d2ddaf1dc8 nodeName:}" failed. No retries permitted until 2025-12-03 05:58:29.39625172 +0000 UTC m=+1033.331712561 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift") pod "swift-storage-0" (UID: "229a32b8-4f61-4370-afc5-a5d2ddaf1dc8") : configmap "swift-ring-files" not found Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.896295 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-cache\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.896380 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.896538 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-lock\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.939181 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jbgt\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-kube-api-access-7jbgt\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.941622 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6psd9" 
event={"ID":"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38","Type":"ContainerStarted","Data":"40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb"} Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.945243 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" event={"ID":"be351a50-d782-4c52-98e1-b66372d2b096","Type":"ContainerStarted","Data":"0c6ee9ce3efd7b6c4ee2f0a4ac4831604d503a60397e368913c4ea011e8bf29f"} Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.945401 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.948394 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qd85b" event={"ID":"fe7b8456-b2a9-44b7-b00b-320854a4c571","Type":"ContainerStarted","Data":"af8d55be2784c4df1a1968a97af99476ccf430c53714d98f6a6ab41509fc4133"} Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.960259 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6psd9" podStartSLOduration=14.129697836 podStartE2EDuration="19.960239508s" podCreationTimestamp="2025-12-03 05:58:09 +0000 UTC" firstStartedPulling="2025-12-03 05:58:22.652836648 +0000 UTC m=+1026.588297489" lastFinishedPulling="2025-12-03 05:58:28.48337832 +0000 UTC m=+1032.418839161" observedRunningTime="2025-12-03 05:58:28.958971964 +0000 UTC m=+1032.894432825" watchObservedRunningTime="2025-12-03 05:58:28.960239508 +0000 UTC m=+1032.895700349" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.966339 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.981314 4810 generic.go:334] "Generic (PLEG): container finished" podID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" containerID="2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa" exitCode=0 Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.981610 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.981549 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" event={"ID":"055bc13f-a18d-44ce-ab83-c0702e38f1d1","Type":"ContainerDied","Data":"2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa"} Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.981768 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d9886d5bf-jzz6t" event={"ID":"055bc13f-a18d-44ce-ab83-c0702e38f1d1","Type":"ContainerDied","Data":"4a0133faac69c0df203c2f33518c21e9c011812da54558d164b24a432e4715a6"} Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.981819 4810 scope.go:117] "RemoveContainer" containerID="2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa" Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.983532 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" event={"ID":"27ca79d5-c933-4b20-9f26-9885671647bf","Type":"ContainerStarted","Data":"0092808f9f5fa558d5d7beacf1f50ad1a960c2778101ae6b94ba5da5da34cb64"} Dec 03 05:58:28 crc kubenswrapper[4810]: I1203 05:58:28.987000 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" podStartSLOduration=4.986982833 podStartE2EDuration="4.986982833s" podCreationTimestamp="2025-12-03 05:58:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:58:28.982200027 +0000 UTC m=+1032.917660868" watchObservedRunningTime="2025-12-03 05:58:28.986982833 +0000 UTC m=+1032.922443664" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.014089 4810 scope.go:117] "RemoveContainer" containerID="4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.046718 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d9886d5bf-jzz6t"] Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.058511 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d9886d5bf-jzz6t"] Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.058806 4810 scope.go:117] "RemoveContainer" containerID="2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa" Dec 03 05:58:29 crc kubenswrapper[4810]: E1203 05:58:29.059399 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa\": container with ID starting with 2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa not found: ID does not exist" containerID="2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.060290 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa"} err="failed to get container status \"2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa\": rpc error: code = NotFound desc = could not find container \"2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa\": container with ID starting with 2d1021afe83c75a6a3b739c6c39205524d98fd32e7f9b023dc620768e94fc2aa not found: ID does not exist" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 
05:58:29.060382 4810 scope.go:117] "RemoveContainer" containerID="4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee" Dec 03 05:58:29 crc kubenswrapper[4810]: E1203 05:58:29.061509 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee\": container with ID starting with 4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee not found: ID does not exist" containerID="4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.061578 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee"} err="failed to get container status \"4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee\": rpc error: code = NotFound desc = could not find container \"4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee\": container with ID starting with 4d47079d73fb128a435c0144047641168f48e820b3bd66d747345dadfac135ee not found: ID does not exist" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.354353 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-729kr"] Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.355934 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.364286 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-729kr"] Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.370559 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.370766 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.370887 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.393260 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.411972 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-scripts\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.412258 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-swiftconf\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.412334 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-etc-swift\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.412401 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-ring-data-devices\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.412539 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-dispersionconf\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.412640 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.412748 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbwv4\" (UniqueName: \"kubernetes.io/projected/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-kube-api-access-dbwv4\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.412857 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-combined-ca-bundle\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: E1203 05:58:29.413113 4810 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 05:58:29 crc kubenswrapper[4810]: E1203 05:58:29.413389 4810 projected.go:194] Error preparing data for projected volume etc-swift for 
pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 05:58:29 crc kubenswrapper[4810]: E1203 05:58:29.413509 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift podName:229a32b8-4f61-4370-afc5-a5d2ddaf1dc8 nodeName:}" failed. No retries permitted until 2025-12-03 05:58:30.413491332 +0000 UTC m=+1034.348952173 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift") pod "swift-storage-0" (UID: "229a32b8-4f61-4370-afc5-a5d2ddaf1dc8") : configmap "swift-ring-files" not found Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.420193 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-7jjx6"] Dec 03 05:58:29 crc kubenswrapper[4810]: E1203 05:58:29.420654 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b644c5e4-4098-4fbf-8907-00a35976cb46" containerName="init" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.420671 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b644c5e4-4098-4fbf-8907-00a35976cb46" containerName="init" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.420884 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b644c5e4-4098-4fbf-8907-00a35976cb46" containerName="init" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.421562 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.429844 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-729kr"] Dec 03 05:58:29 crc kubenswrapper[4810]: E1203 05:58:29.430477 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-dbwv4 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-729kr" podUID="5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.443084 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-7jjx6"] Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.514214 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-dns-svc\") pod \"b644c5e4-4098-4fbf-8907-00a35976cb46\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.514369 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-config\") pod \"b644c5e4-4098-4fbf-8907-00a35976cb46\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.514449 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-sb\") pod \"b644c5e4-4098-4fbf-8907-00a35976cb46\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.514484 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-nb\") pod \"b644c5e4-4098-4fbf-8907-00a35976cb46\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.514537 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4vx6\" (UniqueName: \"kubernetes.io/projected/b644c5e4-4098-4fbf-8907-00a35976cb46-kube-api-access-s4vx6\") pod \"b644c5e4-4098-4fbf-8907-00a35976cb46\" (UID: \"b644c5e4-4098-4fbf-8907-00a35976cb46\") " Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.514833 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-scripts\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.514894 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbwv4\" (UniqueName: \"kubernetes.io/projected/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-kube-api-access-dbwv4\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.514930 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-swiftconf\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.515075 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-combined-ca-bundle\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.515301 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-combined-ca-bundle\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.515416 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-scripts\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.515438 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-swiftconf\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.515454 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-etc-swift\") pod \"swift-ring-rebalance-729kr\" (UID: 
\"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.515474 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-ring-data-devices\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.515540 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-dispersionconf\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.515593 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl2sc\" (UniqueName: \"kubernetes.io/projected/d10f8a77-de87-4373-862d-1c5c27744e5a-kube-api-access-fl2sc\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.516328 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-ring-data-devices\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.516344 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-scripts\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.516341 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-etc-swift\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.516794 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-dispersionconf\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.516897 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-ring-data-devices\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.517032 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d10f8a77-de87-4373-862d-1c5c27744e5a-etc-swift\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " 
pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.518972 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-swiftconf\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.521478 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-dispersionconf\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.521682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-combined-ca-bundle\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.525904 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b644c5e4-4098-4fbf-8907-00a35976cb46-kube-api-access-s4vx6" (OuterVolumeSpecName: "kube-api-access-s4vx6") pod "b644c5e4-4098-4fbf-8907-00a35976cb46" (UID: "b644c5e4-4098-4fbf-8907-00a35976cb46"). InnerVolumeSpecName "kube-api-access-s4vx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.533771 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbwv4\" (UniqueName: \"kubernetes.io/projected/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-kube-api-access-dbwv4\") pod \"swift-ring-rebalance-729kr\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.572837 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-config" (OuterVolumeSpecName: "config") pod "b644c5e4-4098-4fbf-8907-00a35976cb46" (UID: "b644c5e4-4098-4fbf-8907-00a35976cb46"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.577366 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b644c5e4-4098-4fbf-8907-00a35976cb46" (UID: "b644c5e4-4098-4fbf-8907-00a35976cb46"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.578311 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b644c5e4-4098-4fbf-8907-00a35976cb46" (UID: "b644c5e4-4098-4fbf-8907-00a35976cb46"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.588853 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b644c5e4-4098-4fbf-8907-00a35976cb46" (UID: "b644c5e4-4098-4fbf-8907-00a35976cb46"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.618932 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-ring-data-devices\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619009 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d10f8a77-de87-4373-862d-1c5c27744e5a-etc-swift\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619044 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-scripts\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619084 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-swiftconf\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619128 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-combined-ca-bundle\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619165 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-dispersionconf\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619189 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl2sc\" (UniqueName: \"kubernetes.io/projected/d10f8a77-de87-4373-862d-1c5c27744e5a-kube-api-access-fl2sc\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619251 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619262 4810 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619271 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619281 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b644c5e4-4098-4fbf-8907-00a35976cb46-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.619290 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4vx6\" (UniqueName: \"kubernetes.io/projected/b644c5e4-4098-4fbf-8907-00a35976cb46-kube-api-access-s4vx6\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.620183 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-scripts\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.620448 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d10f8a77-de87-4373-862d-1c5c27744e5a-etc-swift\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.620700 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-ring-data-devices\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.626121 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-dispersionconf\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.626510 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-combined-ca-bundle\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.627380 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-swiftconf\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.638530 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl2sc\" (UniqueName: \"kubernetes.io/projected/d10f8a77-de87-4373-862d-1c5c27744e5a-kube-api-access-fl2sc\") pod \"swift-ring-rebalance-7jjx6\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " 
pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.745491 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.993539 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qd85b" event={"ID":"fe7b8456-b2a9-44b7-b00b-320854a4c571","Type":"ContainerStarted","Data":"6bbaa244e3660454bc7e8aab9a40fa955fb11d7d2de04cc02e2a28910eb80df9"} Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.995004 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.995032 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.999172 4810 generic.go:334] "Generic (PLEG): container finished" podID="27ca79d5-c933-4b20-9f26-9885671647bf" containerID="c264160a788ec84b59147de5a590533a756373d3d39419c80a8a46e8eb9aec35" exitCode=0 Dec 03 05:58:29 crc kubenswrapper[4810]: I1203 05:58:29.999220 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" event={"ID":"27ca79d5-c933-4b20-9f26-9885671647bf","Type":"ContainerDied","Data":"c264160a788ec84b59147de5a590533a756373d3d39419c80a8a46e8eb9aec35"} Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.003163 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.003693 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65459b959f-kk7hv" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.004866 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65459b959f-kk7hv" event={"ID":"b644c5e4-4098-4fbf-8907-00a35976cb46","Type":"ContainerDied","Data":"d66a47b4bfb7a0abcacb8db888dfc2224d256e952d3fb4dbaf3372cd4c30c124"} Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.004915 4810 scope.go:117] "RemoveContainer" containerID="82dc8dd5ae9e2e7984731b6603f828b2033fcea8c418f8776928b562617f76e9" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.023170 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-qd85b" podStartSLOduration=22.979107028 podStartE2EDuration="29.023151462s" podCreationTimestamp="2025-12-03 05:58:01 +0000 UTC" firstStartedPulling="2025-12-03 05:58:20.143525954 +0000 UTC m=+1024.078986795" lastFinishedPulling="2025-12-03 05:58:26.187570388 +0000 UTC m=+1030.123031229" observedRunningTime="2025-12-03 05:58:30.01664697 +0000 UTC m=+1033.952107821" watchObservedRunningTime="2025-12-03 05:58:30.023151462 +0000 UTC m=+1033.958612303" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.034173 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.123939 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65459b959f-kk7hv"] Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.127479 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbwv4\" (UniqueName: \"kubernetes.io/projected/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-kube-api-access-dbwv4\") pod \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.127592 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-ring-data-devices\") pod \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.127677 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-swiftconf\") pod \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.127776 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-etc-swift\") pod \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.127879 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-dispersionconf\") pod \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.127938 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-scripts\") pod \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.127993 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-combined-ca-bundle\") pod \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\" (UID: \"5e627a79-5212-4ea5-83e9-bb0ac4f4d30e\") " Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.129556 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" (UID: "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.129882 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" (UID: "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.132265 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65459b959f-kk7hv"] Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.133793 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-scripts" (OuterVolumeSpecName: "scripts") pod "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" (UID: "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.134010 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" (UID: "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.139121 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" (UID: "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.139191 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" (UID: "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.139207 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-kube-api-access-dbwv4" (OuterVolumeSpecName: "kube-api-access-dbwv4") pod "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" (UID: "5e627a79-5212-4ea5-83e9-bb0ac4f4d30e"). InnerVolumeSpecName "kube-api-access-dbwv4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.230315 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbwv4\" (UniqueName: \"kubernetes.io/projected/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-kube-api-access-dbwv4\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.230349 4810 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.230358 4810 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.230368 4810 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.230377 4810 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.230386 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.230395 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.290019 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-7jjx6"] Dec 03 05:58:30 crc kubenswrapper[4810]: W1203 05:58:30.300413 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd10f8a77_de87_4373_862d_1c5c27744e5a.slice/crio-7da2fa4c2ff6da44c2c07b8c3141d6d8147a5de12eef1b70bfbf91383e40aae0 WatchSource:0}: Error finding container 7da2fa4c2ff6da44c2c07b8c3141d6d8147a5de12eef1b70bfbf91383e40aae0: Status 404 returned error can't find the container with id 7da2fa4c2ff6da44c2c07b8c3141d6d8147a5de12eef1b70bfbf91383e40aae0 Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.325971 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.326021 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.375471 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.402389 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="055bc13f-a18d-44ce-ab83-c0702e38f1d1" path="/var/lib/kubelet/pods/055bc13f-a18d-44ce-ab83-c0702e38f1d1/volumes" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.403001 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="b644c5e4-4098-4fbf-8907-00a35976cb46" path="/var/lib/kubelet/pods/b644c5e4-4098-4fbf-8907-00a35976cb46/volumes" Dec 03 05:58:30 crc kubenswrapper[4810]: I1203 05:58:30.433201 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:30 crc kubenswrapper[4810]: E1203 05:58:30.433413 4810 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 05:58:30 crc kubenswrapper[4810]: E1203 05:58:30.433439 4810 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 05:58:30 crc kubenswrapper[4810]: E1203 05:58:30.433492 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift podName:229a32b8-4f61-4370-afc5-a5d2ddaf1dc8 nodeName:}" failed. No retries permitted until 2025-12-03 05:58:32.433473074 +0000 UTC m=+1036.368933915 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift") pod "swift-storage-0" (UID: "229a32b8-4f61-4370-afc5-a5d2ddaf1dc8") : configmap "swift-ring-files" not found Dec 03 05:58:31 crc kubenswrapper[4810]: I1203 05:58:31.011704 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7jjx6" event={"ID":"d10f8a77-de87-4373-862d-1c5c27744e5a","Type":"ContainerStarted","Data":"7da2fa4c2ff6da44c2c07b8c3141d6d8147a5de12eef1b70bfbf91383e40aae0"} Dec 03 05:58:31 crc kubenswrapper[4810]: I1203 05:58:31.016404 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" event={"ID":"27ca79d5-c933-4b20-9f26-9885671647bf","Type":"ContainerStarted","Data":"bb4e3e2186bf3011210cdcd3aac68a5e99b75c98eca650faeda19817289dcdbc"} Dec 03 05:58:31 crc kubenswrapper[4810]: I1203 05:58:31.016725 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:31 crc kubenswrapper[4810]: I1203 05:58:31.016804 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-729kr" Dec 03 05:58:31 crc kubenswrapper[4810]: I1203 05:58:31.035313 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" podStartSLOduration=4.035292757 podStartE2EDuration="4.035292757s" podCreationTimestamp="2025-12-03 05:58:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:58:31.0323737 +0000 UTC m=+1034.967834541" watchObservedRunningTime="2025-12-03 05:58:31.035292757 +0000 UTC m=+1034.970753598" Dec 03 05:58:31 crc kubenswrapper[4810]: I1203 05:58:31.086721 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-729kr"] Dec 03 05:58:31 crc kubenswrapper[4810]: I1203 05:58:31.094217 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-729kr"] Dec 03 05:58:32 crc kubenswrapper[4810]: I1203 05:58:32.388894 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e627a79-5212-4ea5-83e9-bb0ac4f4d30e" path="/var/lib/kubelet/pods/5e627a79-5212-4ea5-83e9-bb0ac4f4d30e/volumes" Dec 03 05:58:32 crc kubenswrapper[4810]: I1203 05:58:32.471259 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:32 crc kubenswrapper[4810]: E1203 05:58:32.471463 4810 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 05:58:32 crc kubenswrapper[4810]: E1203 05:58:32.471491 4810 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 05:58:32 crc kubenswrapper[4810]: E1203 05:58:32.471561 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift podName:229a32b8-4f61-4370-afc5-a5d2ddaf1dc8 nodeName:}" failed. No retries permitted until 2025-12-03 05:58:36.47153994 +0000 UTC m=+1040.407000781 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift") pod "swift-storage-0" (UID: "229a32b8-4f61-4370-afc5-a5d2ddaf1dc8") : configmap "swift-ring-files" not found Dec 03 05:58:33 crc kubenswrapper[4810]: I1203 05:58:33.037927 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"907b4534-7daf-4a4f-ae5b-65d58194cabf","Type":"ContainerStarted","Data":"4503fb558f06c2acb1146fa3ecd608f86e96c819a23b56c66e44c2f07ad05293"} Dec 03 05:58:33 crc kubenswrapper[4810]: I1203 05:58:33.041129 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b0b5ee63-b0f3-4133-a294-69ed680c5374","Type":"ContainerStarted","Data":"5aa7068050ca46c232aac34cd3d543a240604be2853665f07e5dbe094b06643a"} Dec 03 05:58:33 crc kubenswrapper[4810]: I1203 05:58:33.044462 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-hthr4" event={"ID":"366d9c09-ff45-486b-957f-abeba4ccfda0","Type":"ContainerStarted","Data":"5407c218d750486f81171a6870c6458638be048835c3c598f07e742e95f6397b"} Dec 03 05:58:33 crc kubenswrapper[4810]: I1203 05:58:33.047825 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b03355df-e435-4db6-8f0a-10a6618f4bfa","Type":"ContainerStarted","Data":"dfc7830f279d37d4834ee41ad602dc5a0c6c93fe5fed6b340ad02d6ad5079794"} Dec 03 05:58:33 crc kubenswrapper[4810]: I1203 05:58:33.066479 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=22.191897921 podStartE2EDuration="33.066458712s" podCreationTimestamp="2025-12-03 05:58:00 +0000 UTC" firstStartedPulling="2025-12-03 05:58:21.788970133 +0000 UTC m=+1025.724430974" lastFinishedPulling="2025-12-03 05:58:32.663530914 +0000 UTC m=+1036.598991765" observedRunningTime="2025-12-03 05:58:33.064404298 +0000 UTC m=+1036.999865139" watchObservedRunningTime="2025-12-03 05:58:33.066458712 +0000 UTC m=+1037.001919553" Dec 03 05:58:33 crc kubenswrapper[4810]: I1203 05:58:33.126654 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=19.116511083 podStartE2EDuration="30.126627149s" podCreationTimestamp="2025-12-03 05:58:03 +0000 UTC" firstStartedPulling="2025-12-03 05:58:21.65387313 +0000 UTC m=+1025.589333971" lastFinishedPulling="2025-12-03 05:58:32.663989196 +0000 UTC m=+1036.599450037" observedRunningTime="2025-12-03 05:58:33.126462915 +0000 UTC m=+1037.061923756" watchObservedRunningTime="2025-12-03 05:58:33.126627149 +0000 UTC m=+1037.062087980" Dec 03 05:58:33 crc kubenswrapper[4810]: I1203 05:58:33.145124 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-hthr4" podStartSLOduration=3.136409414 podStartE2EDuration="9.145098686s" podCreationTimestamp="2025-12-03 05:58:24 +0000 UTC" firstStartedPulling="2025-12-03 05:58:26.670882455 +0000 UTC m=+1030.606343296" lastFinishedPulling="2025-12-03 05:58:32.679571727 +0000 UTC m=+1036.615032568" observedRunningTime="2025-12-03 05:58:33.140024682 +0000 UTC m=+1037.075485533" watchObservedRunningTime="2025-12-03 05:58:33.145098686 +0000 UTC m=+1037.080559527" Dec 03 05:58:34 crc kubenswrapper[4810]: I1203 05:58:34.056110 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"43dbe69c-d6b8-4773-ad88-79c3c975afdf","Type":"ContainerStarted","Data":"e4949b492e4394fad97a05af5ecd0f3270a715f053d57898e6bca3965a05913d"} Dec 03 05:58:34 crc kubenswrapper[4810]: I1203 05:58:34.842387 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:34 crc kubenswrapper[4810]: I1203 05:58:34.915071 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:34 crc kubenswrapper[4810]: I1203 05:58:34.960390 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:35 crc kubenswrapper[4810]: I1203 05:58:35.067260 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:35 crc kubenswrapper[4810]: I1203 05:58:35.112087 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 03 05:58:35 crc kubenswrapper[4810]: I1203 05:58:35.141808 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:35 crc kubenswrapper[4810]: I1203 05:58:35.141878 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:35 crc kubenswrapper[4810]: I1203 05:58:35.201419 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:35 crc kubenswrapper[4810]: I1203 05:58:35.756321 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 03 05:58:35 crc kubenswrapper[4810]: I1203 05:58:35.756364 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 03 05:58:35 crc kubenswrapper[4810]: I1203 05:58:35.910259 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.120929 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.220144 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.324433 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.327172 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.330366 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-6pg5r" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.330548 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.330704 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.343311 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.365702 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.374887 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.374970 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.374997 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlcww\" (UniqueName: \"kubernetes.io/projected/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-kube-api-access-hlcww\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.377022 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-scripts\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.377075 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.377239 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.377264 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-config\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: 
I1203 05:58:36.480161 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.480261 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-config\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.481856 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.481925 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.481969 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.481993 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlcww\" (UniqueName: \"kubernetes.io/projected/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-kube-api-access-hlcww\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.482053 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.482092 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-scripts\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.482160 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: E1203 05:58:36.483202 4810 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 05:58:36 crc kubenswrapper[4810]: E1203 05:58:36.483231 4810 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 
05:58:36 crc kubenswrapper[4810]: E1203 05:58:36.483288 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift podName:229a32b8-4f61-4370-afc5-a5d2ddaf1dc8 nodeName:}" failed. No retries permitted until 2025-12-03 05:58:44.483264361 +0000 UTC m=+1048.418725202 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift") pod "swift-storage-0" (UID: "229a32b8-4f61-4370-afc5-a5d2ddaf1dc8") : configmap "swift-ring-files" not found Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.484246 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-config\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.491072 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.491283 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.506419 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlcww\" (UniqueName: \"kubernetes.io/projected/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-kube-api-access-hlcww\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.519316 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.798524 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f9e5557-2fe8-4d2f-a663-3f015aa61b9e-scripts\") pod \"ovn-northd-0\" (UID: \"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e\") " pod="openstack/ovn-northd-0" Dec 03 05:58:36 crc kubenswrapper[4810]: I1203 05:58:36.960202 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 05:58:37 crc kubenswrapper[4810]: I1203 05:58:37.570074 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 05:58:37 crc kubenswrapper[4810]: I1203 05:58:37.990942 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:58:38 crc kubenswrapper[4810]: I1203 05:58:38.049379 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d57cc48bc-sng7j"] Dec 03 05:58:38 crc kubenswrapper[4810]: I1203 05:58:38.053249 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" podUID="be351a50-d782-4c52-98e1-b66372d2b096" containerName="dnsmasq-dns" containerID="cri-o://0c6ee9ce3efd7b6c4ee2f0a4ac4831604d503a60397e368913c4ea011e8bf29f" gracePeriod=10 Dec 03 05:58:38 crc kubenswrapper[4810]: I1203 05:58:38.102033 4810 generic.go:334] "Generic (PLEG): container finished" podID="b0b5ee63-b0f3-4133-a294-69ed680c5374" containerID="5aa7068050ca46c232aac34cd3d543a240604be2853665f07e5dbe094b06643a" exitCode=0 Dec 03 05:58:38 crc kubenswrapper[4810]: I1203 05:58:38.102139 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b0b5ee63-b0f3-4133-a294-69ed680c5374","Type":"ContainerDied","Data":"5aa7068050ca46c232aac34cd3d543a240604be2853665f07e5dbe094b06643a"} Dec 03 05:58:39 crc kubenswrapper[4810]: I1203 05:58:39.959549 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" podUID="be351a50-d782-4c52-98e1-b66372d2b096" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.108:5353: connect: connection refused" Dec 03 05:58:40 crc kubenswrapper[4810]: I1203 05:58:40.117925 4810 generic.go:334] "Generic (PLEG): container finished" podID="be351a50-d782-4c52-98e1-b66372d2b096" containerID="0c6ee9ce3efd7b6c4ee2f0a4ac4831604d503a60397e368913c4ea011e8bf29f" exitCode=0 Dec 03 05:58:40 crc kubenswrapper[4810]: I1203 05:58:40.118007 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" event={"ID":"be351a50-d782-4c52-98e1-b66372d2b096","Type":"ContainerDied","Data":"0c6ee9ce3efd7b6c4ee2f0a4ac4831604d503a60397e368913c4ea011e8bf29f"} Dec 03 05:58:40 crc kubenswrapper[4810]: I1203 05:58:40.368344 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:40 crc kubenswrapper[4810]: I1203 05:58:40.425173 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6psd9"] Dec 03 05:58:41 crc kubenswrapper[4810]: I1203 05:58:41.126645 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6psd9" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerName="registry-server" containerID="cri-o://40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb" gracePeriod=2 Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.670620 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.721460 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.819092 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj8v4\" (UniqueName: \"kubernetes.io/projected/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-kube-api-access-sj8v4\") pod \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.819154 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-dns-svc\") pod \"be351a50-d782-4c52-98e1-b66372d2b096\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.819185 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-ovsdbserver-nb\") pod \"be351a50-d782-4c52-98e1-b66372d2b096\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.819243 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-utilities\") pod \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.819267 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-catalog-content\") pod \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\" (UID: \"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38\") " Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.819324 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwfrz\" (UniqueName: \"kubernetes.io/projected/be351a50-d782-4c52-98e1-b66372d2b096-kube-api-access-mwfrz\") pod \"be351a50-d782-4c52-98e1-b66372d2b096\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.819384 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-config\") pod \"be351a50-d782-4c52-98e1-b66372d2b096\" (UID: \"be351a50-d782-4c52-98e1-b66372d2b096\") " Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.820238 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-utilities" (OuterVolumeSpecName: "utilities") pod "b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" (UID: "b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.824785 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-kube-api-access-sj8v4" (OuterVolumeSpecName: "kube-api-access-sj8v4") pod "b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" (UID: "b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38"). InnerVolumeSpecName "kube-api-access-sj8v4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.827645 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be351a50-d782-4c52-98e1-b66372d2b096-kube-api-access-mwfrz" (OuterVolumeSpecName: "kube-api-access-mwfrz") pod "be351a50-d782-4c52-98e1-b66372d2b096" (UID: "be351a50-d782-4c52-98e1-b66372d2b096"). InnerVolumeSpecName "kube-api-access-mwfrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.840839 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" (UID: "b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.863230 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 05:58:42 crc kubenswrapper[4810]: W1203 05:58:42.865306 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f9e5557_2fe8_4d2f_a663_3f015aa61b9e.slice/crio-cebff7f28ec043a8b5a1e633491034e8c8530d0c1ea0b17826cb029ecab5277f WatchSource:0}: Error finding container cebff7f28ec043a8b5a1e633491034e8c8530d0c1ea0b17826cb029ecab5277f: Status 404 returned error can't find the container with id cebff7f28ec043a8b5a1e633491034e8c8530d0c1ea0b17826cb029ecab5277f Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.866201 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "be351a50-d782-4c52-98e1-b66372d2b096" (UID: "be351a50-d782-4c52-98e1-b66372d2b096"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.868356 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "be351a50-d782-4c52-98e1-b66372d2b096" (UID: "be351a50-d782-4c52-98e1-b66372d2b096"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.882452 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-config" (OuterVolumeSpecName: "config") pod "be351a50-d782-4c52-98e1-b66372d2b096" (UID: "be351a50-d782-4c52-98e1-b66372d2b096"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.921797 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.921834 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.921845 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwfrz\" (UniqueName: \"kubernetes.io/projected/be351a50-d782-4c52-98e1-b66372d2b096-kube-api-access-mwfrz\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.921854 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.921864 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj8v4\" (UniqueName: \"kubernetes.io/projected/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38-kube-api-access-sj8v4\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.921873 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:42 crc kubenswrapper[4810]: I1203 05:58:42.921881 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be351a50-d782-4c52-98e1-b66372d2b096-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.142278 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7jjx6" event={"ID":"d10f8a77-de87-4373-862d-1c5c27744e5a","Type":"ContainerStarted","Data":"a4fb46313a9635d111b412a1ec8e1ebfb6d63f3ab68038d3ec53060b3e5f1796"} Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.148682 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b0b5ee63-b0f3-4133-a294-69ed680c5374","Type":"ContainerStarted","Data":"893a40d7c694c4044cc9d1dff3473a82ad1310910f431d19117a106c88ec506f"} Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.151552 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e","Type":"ContainerStarted","Data":"cebff7f28ec043a8b5a1e633491034e8c8530d0c1ea0b17826cb029ecab5277f"} Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.154473 4810 generic.go:334] "Generic (PLEG): container finished" podID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerID="40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb" exitCode=0 Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.154528 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6psd9" event={"ID":"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38","Type":"ContainerDied","Data":"40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb"} Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.154548 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6psd9" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.154609 4810 scope.go:117] "RemoveContainer" containerID="40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.154559 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6psd9" event={"ID":"b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38","Type":"ContainerDied","Data":"63be1b10591b2fcfb16d58833d73da0554cfd7caff7fcacbb7cfe0c788f1614e"} Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.159930 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" event={"ID":"be351a50-d782-4c52-98e1-b66372d2b096","Type":"ContainerDied","Data":"2a139ca149186e6ff3d80d18455356dd673daa4cec1cfe2e3a2b904fb443a260"} Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.160058 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d57cc48bc-sng7j" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.169676 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-7jjx6" podStartSLOduration=2.055905137 podStartE2EDuration="14.169655822s" podCreationTimestamp="2025-12-03 05:58:29 +0000 UTC" firstStartedPulling="2025-12-03 05:58:30.30250603 +0000 UTC m=+1034.237966871" lastFinishedPulling="2025-12-03 05:58:42.416256695 +0000 UTC m=+1046.351717556" observedRunningTime="2025-12-03 05:58:43.159871145 +0000 UTC m=+1047.095331986" watchObservedRunningTime="2025-12-03 05:58:43.169655822 +0000 UTC m=+1047.105116663" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.181868 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371986.672924 podStartE2EDuration="50.181852063s" podCreationTimestamp="2025-12-03 05:57:53 +0000 UTC" firstStartedPulling="2025-12-03 05:57:54.839072911 +0000 UTC m=+998.774533752" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:58:43.181003741 +0000 UTC m=+1047.116464602" watchObservedRunningTime="2025-12-03 05:58:43.181852063 +0000 UTC m=+1047.117312904" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.196254 4810 scope.go:117] "RemoveContainer" containerID="756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.215825 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d57cc48bc-sng7j"] Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.224353 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d57cc48bc-sng7j"] Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.229884 4810 scope.go:117] "RemoveContainer" containerID="7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.233852 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6psd9"] Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.243467 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6psd9"] Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.253760 4810 scope.go:117] "RemoveContainer" containerID="40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb" Dec 03 05:58:43 crc kubenswrapper[4810]: E1203 
05:58:43.254452 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb\": container with ID starting with 40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb not found: ID does not exist" containerID="40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.254485 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb"} err="failed to get container status \"40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb\": rpc error: code = NotFound desc = could not find container \"40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb\": container with ID starting with 40804e8d275b5ba8d05fcb6fca041c0d0262b0721a1378baa8cbbab3e7ba67bb not found: ID does not exist" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.254510 4810 scope.go:117] "RemoveContainer" containerID="756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39" Dec 03 05:58:43 crc kubenswrapper[4810]: E1203 05:58:43.254922 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39\": container with ID starting with 756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39 not found: ID does not exist" containerID="756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.255046 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39"} err="failed to get container status \"756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39\": rpc error: code = NotFound desc = could not find container \"756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39\": container with ID starting with 756001b5f8be7c32449310eb2f2a65670eccc5fd57a1b743c2636edbe1105b39 not found: ID does not exist" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.255148 4810 scope.go:117] "RemoveContainer" containerID="7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e" Dec 03 05:58:43 crc kubenswrapper[4810]: E1203 05:58:43.255577 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e\": container with ID starting with 7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e not found: ID does not exist" containerID="7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.255680 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e"} err="failed to get container status \"7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e\": rpc error: code = NotFound desc = could not find container \"7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e\": container with ID starting with 7a646aca46dfc73ea65f9643255355e3bc6f32667dab70c1deb09b82336b0e5e not found: ID does not exist" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.255859 4810 
scope.go:117] "RemoveContainer" containerID="0c6ee9ce3efd7b6c4ee2f0a4ac4831604d503a60397e368913c4ea011e8bf29f" Dec 03 05:58:43 crc kubenswrapper[4810]: I1203 05:58:43.273139 4810 scope.go:117] "RemoveContainer" containerID="843177296cbaf04307cd93a834d794ce436524f7ca60bf4f3319789a7153f5bb" Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.178374 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e","Type":"ContainerStarted","Data":"4823c8c6477da3cf7eeefe3d530abea3ad62c33e072ac4ee20993d078c6b8776"} Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.178851 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3f9e5557-2fe8-4d2f-a663-3f015aa61b9e","Type":"ContainerStarted","Data":"b3166e2b8960db06dbf08088f59deba7b4e3b0d40e810085ede99ff3287d161d"} Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.178972 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.206566 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=7.5010517530000005 podStartE2EDuration="8.206539139s" podCreationTimestamp="2025-12-03 05:58:36 +0000 UTC" firstStartedPulling="2025-12-03 05:58:42.868201209 +0000 UTC m=+1046.803662050" lastFinishedPulling="2025-12-03 05:58:43.573688595 +0000 UTC m=+1047.509149436" observedRunningTime="2025-12-03 05:58:44.203621492 +0000 UTC m=+1048.139082353" watchObservedRunningTime="2025-12-03 05:58:44.206539139 +0000 UTC m=+1048.142000010" Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.396810 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" path="/var/lib/kubelet/pods/b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38/volumes" Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.398275 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be351a50-d782-4c52-98e1-b66372d2b096" path="/var/lib/kubelet/pods/be351a50-d782-4c52-98e1-b66372d2b096/volumes" Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.412035 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.412086 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 03 05:58:44 crc kubenswrapper[4810]: I1203 05:58:44.556921 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:58:44 crc kubenswrapper[4810]: E1203 05:58:44.557157 4810 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 05:58:44 crc kubenswrapper[4810]: E1203 05:58:44.557187 4810 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 05:58:44 crc kubenswrapper[4810]: E1203 05:58:44.557259 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift podName:229a32b8-4f61-4370-afc5-a5d2ddaf1dc8 nodeName:}" failed. 
No retries permitted until 2025-12-03 05:59:00.557237698 +0000 UTC m=+1064.492698539 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift") pod "swift-storage-0" (UID: "229a32b8-4f61-4370-afc5-a5d2ddaf1dc8") : configmap "swift-ring-files" not found Dec 03 05:58:48 crc kubenswrapper[4810]: I1203 05:58:48.513123 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 03 05:58:48 crc kubenswrapper[4810]: I1203 05:58:48.607705 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 03 05:58:51 crc kubenswrapper[4810]: I1203 05:58:51.245021 4810 generic.go:334] "Generic (PLEG): container finished" podID="d10f8a77-de87-4373-862d-1c5c27744e5a" containerID="a4fb46313a9635d111b412a1ec8e1ebfb6d63f3ab68038d3ec53060b3e5f1796" exitCode=0 Dec 03 05:58:51 crc kubenswrapper[4810]: I1203 05:58:51.245116 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7jjx6" event={"ID":"d10f8a77-de87-4373-862d-1c5c27744e5a","Type":"ContainerDied","Data":"a4fb46313a9635d111b412a1ec8e1ebfb6d63f3ab68038d3ec53060b3e5f1796"} Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.571322 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.717046 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-combined-ca-bundle\") pod \"d10f8a77-de87-4373-862d-1c5c27744e5a\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.717095 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-dispersionconf\") pod \"d10f8a77-de87-4373-862d-1c5c27744e5a\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.717200 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d10f8a77-de87-4373-862d-1c5c27744e5a-etc-swift\") pod \"d10f8a77-de87-4373-862d-1c5c27744e5a\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.717233 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-swiftconf\") pod \"d10f8a77-de87-4373-862d-1c5c27744e5a\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.718152 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d10f8a77-de87-4373-862d-1c5c27744e5a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d10f8a77-de87-4373-862d-1c5c27744e5a" (UID: "d10f8a77-de87-4373-862d-1c5c27744e5a"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.718267 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-ring-data-devices\") pod \"d10f8a77-de87-4373-862d-1c5c27744e5a\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.719147 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d10f8a77-de87-4373-862d-1c5c27744e5a" (UID: "d10f8a77-de87-4373-862d-1c5c27744e5a"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.719225 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fl2sc\" (UniqueName: \"kubernetes.io/projected/d10f8a77-de87-4373-862d-1c5c27744e5a-kube-api-access-fl2sc\") pod \"d10f8a77-de87-4373-862d-1c5c27744e5a\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.719253 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-scripts\") pod \"d10f8a77-de87-4373-862d-1c5c27744e5a\" (UID: \"d10f8a77-de87-4373-862d-1c5c27744e5a\") " Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.719973 4810 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d10f8a77-de87-4373-862d-1c5c27744e5a-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.719992 4810 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.723144 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d10f8a77-de87-4373-862d-1c5c27744e5a-kube-api-access-fl2sc" (OuterVolumeSpecName: "kube-api-access-fl2sc") pod "d10f8a77-de87-4373-862d-1c5c27744e5a" (UID: "d10f8a77-de87-4373-862d-1c5c27744e5a"). InnerVolumeSpecName "kube-api-access-fl2sc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.727306 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d10f8a77-de87-4373-862d-1c5c27744e5a" (UID: "d10f8a77-de87-4373-862d-1c5c27744e5a"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.739337 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-scripts" (OuterVolumeSpecName: "scripts") pod "d10f8a77-de87-4373-862d-1c5c27744e5a" (UID: "d10f8a77-de87-4373-862d-1c5c27744e5a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.741038 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d10f8a77-de87-4373-862d-1c5c27744e5a" (UID: "d10f8a77-de87-4373-862d-1c5c27744e5a"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.742207 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d10f8a77-de87-4373-862d-1c5c27744e5a" (UID: "d10f8a77-de87-4373-862d-1c5c27744e5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.821611 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.821653 4810 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.821667 4810 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d10f8a77-de87-4373-862d-1c5c27744e5a-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.821679 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fl2sc\" (UniqueName: \"kubernetes.io/projected/d10f8a77-de87-4373-862d-1c5c27744e5a-kube-api-access-fl2sc\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:52 crc kubenswrapper[4810]: I1203 05:58:52.821693 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d10f8a77-de87-4373-862d-1c5c27744e5a-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:53 crc kubenswrapper[4810]: I1203 05:58:53.262631 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7jjx6" event={"ID":"d10f8a77-de87-4373-862d-1c5c27744e5a","Type":"ContainerDied","Data":"7da2fa4c2ff6da44c2c07b8c3141d6d8147a5de12eef1b70bfbf91383e40aae0"} Dec 03 05:58:53 crc kubenswrapper[4810]: I1203 05:58:53.262678 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7da2fa4c2ff6da44c2c07b8c3141d6d8147a5de12eef1b70bfbf91383e40aae0" Dec 03 05:58:53 crc kubenswrapper[4810]: I1203 05:58:53.262687 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-7jjx6" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.287583 4810 generic.go:334] "Generic (PLEG): container finished" podID="55cb8ef9-3722-41ab-8655-ccb1508619fd" containerID="af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb" exitCode=0 Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.287650 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"55cb8ef9-3722-41ab-8655-ccb1508619fd","Type":"ContainerDied","Data":"af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb"} Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.629214 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-1d40-account-create-update-88mwj"] Dec 03 05:58:55 crc kubenswrapper[4810]: E1203 05:58:55.630124 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be351a50-d782-4c52-98e1-b66372d2b096" containerName="dnsmasq-dns" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630152 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="be351a50-d782-4c52-98e1-b66372d2b096" containerName="dnsmasq-dns" Dec 03 05:58:55 crc kubenswrapper[4810]: E1203 05:58:55.630170 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d10f8a77-de87-4373-862d-1c5c27744e5a" containerName="swift-ring-rebalance" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630178 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d10f8a77-de87-4373-862d-1c5c27744e5a" containerName="swift-ring-rebalance" Dec 03 05:58:55 crc kubenswrapper[4810]: E1203 05:58:55.630192 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be351a50-d782-4c52-98e1-b66372d2b096" containerName="init" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630199 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="be351a50-d782-4c52-98e1-b66372d2b096" containerName="init" Dec 03 05:58:55 crc kubenswrapper[4810]: E1203 05:58:55.630214 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerName="extract-content" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630221 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerName="extract-content" Dec 03 05:58:55 crc kubenswrapper[4810]: E1203 05:58:55.630240 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerName="extract-utilities" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630247 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerName="extract-utilities" Dec 03 05:58:55 crc kubenswrapper[4810]: E1203 05:58:55.630279 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerName="registry-server" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630288 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerName="registry-server" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630480 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0a3af64-f45a-4cfe-b6d8-0e81b0f4be38" containerName="registry-server" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630501 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="be351a50-d782-4c52-98e1-b66372d2b096" 
containerName="dnsmasq-dns" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.630516 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d10f8a77-de87-4373-862d-1c5c27744e5a" containerName="swift-ring-rebalance" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.631294 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.633652 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.636899 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-qbv8v"] Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.638255 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.646630 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1d40-account-create-update-88mwj"] Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.653639 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-qbv8v"] Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.771779 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ph88\" (UniqueName: \"kubernetes.io/projected/92911999-e22a-4a27-ab41-8fe265136906-kube-api-access-9ph88\") pod \"keystone-1d40-account-create-update-88mwj\" (UID: \"92911999-e22a-4a27-ab41-8fe265136906\") " pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.771873 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a713ace-ef43-4698-9691-e961d0cf1b48-operator-scripts\") pod \"keystone-db-create-qbv8v\" (UID: \"1a713ace-ef43-4698-9691-e961d0cf1b48\") " pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.771903 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw9c7\" (UniqueName: \"kubernetes.io/projected/1a713ace-ef43-4698-9691-e961d0cf1b48-kube-api-access-nw9c7\") pod \"keystone-db-create-qbv8v\" (UID: \"1a713ace-ef43-4698-9691-e961d0cf1b48\") " pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.772061 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92911999-e22a-4a27-ab41-8fe265136906-operator-scripts\") pod \"keystone-1d40-account-create-update-88mwj\" (UID: \"92911999-e22a-4a27-ab41-8fe265136906\") " pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.873560 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92911999-e22a-4a27-ab41-8fe265136906-operator-scripts\") pod \"keystone-1d40-account-create-update-88mwj\" (UID: \"92911999-e22a-4a27-ab41-8fe265136906\") " pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.873656 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-9ph88\" (UniqueName: \"kubernetes.io/projected/92911999-e22a-4a27-ab41-8fe265136906-kube-api-access-9ph88\") pod \"keystone-1d40-account-create-update-88mwj\" (UID: \"92911999-e22a-4a27-ab41-8fe265136906\") " pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.873722 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a713ace-ef43-4698-9691-e961d0cf1b48-operator-scripts\") pod \"keystone-db-create-qbv8v\" (UID: \"1a713ace-ef43-4698-9691-e961d0cf1b48\") " pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.873759 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw9c7\" (UniqueName: \"kubernetes.io/projected/1a713ace-ef43-4698-9691-e961d0cf1b48-kube-api-access-nw9c7\") pod \"keystone-db-create-qbv8v\" (UID: \"1a713ace-ef43-4698-9691-e961d0cf1b48\") " pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.874674 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a713ace-ef43-4698-9691-e961d0cf1b48-operator-scripts\") pod \"keystone-db-create-qbv8v\" (UID: \"1a713ace-ef43-4698-9691-e961d0cf1b48\") " pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.874942 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92911999-e22a-4a27-ab41-8fe265136906-operator-scripts\") pod \"keystone-1d40-account-create-update-88mwj\" (UID: \"92911999-e22a-4a27-ab41-8fe265136906\") " pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.892931 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw9c7\" (UniqueName: \"kubernetes.io/projected/1a713ace-ef43-4698-9691-e961d0cf1b48-kube-api-access-nw9c7\") pod \"keystone-db-create-qbv8v\" (UID: \"1a713ace-ef43-4698-9691-e961d0cf1b48\") " pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.898454 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ph88\" (UniqueName: \"kubernetes.io/projected/92911999-e22a-4a27-ab41-8fe265136906-kube-api-access-9ph88\") pod \"keystone-1d40-account-create-update-88mwj\" (UID: \"92911999-e22a-4a27-ab41-8fe265136906\") " pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.943112 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-f5r7f"] Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.946076 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.952673 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-f5r7f"] Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.953092 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:55 crc kubenswrapper[4810]: I1203 05:58:55.967394 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.024788 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-fa4a-account-create-update-rv5r5"] Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.027141 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.029564 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.034396 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-fa4a-account-create-update-rv5r5"] Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.076708 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1344a6a7-87f7-4e67-abfd-16d32643169b-operator-scripts\") pod \"placement-db-create-f5r7f\" (UID: \"1344a6a7-87f7-4e67-abfd-16d32643169b\") " pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.079953 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq6rl\" (UniqueName: \"kubernetes.io/projected/1344a6a7-87f7-4e67-abfd-16d32643169b-kube-api-access-rq6rl\") pod \"placement-db-create-f5r7f\" (UID: \"1344a6a7-87f7-4e67-abfd-16d32643169b\") " pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.181605 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq6rl\" (UniqueName: \"kubernetes.io/projected/1344a6a7-87f7-4e67-abfd-16d32643169b-kube-api-access-rq6rl\") pod \"placement-db-create-f5r7f\" (UID: \"1344a6a7-87f7-4e67-abfd-16d32643169b\") " pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.181721 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1344a6a7-87f7-4e67-abfd-16d32643169b-operator-scripts\") pod \"placement-db-create-f5r7f\" (UID: \"1344a6a7-87f7-4e67-abfd-16d32643169b\") " pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.181762 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l85rr\" (UniqueName: \"kubernetes.io/projected/7582561b-f4c4-4695-8173-2dc6a168d119-kube-api-access-l85rr\") pod \"placement-fa4a-account-create-update-rv5r5\" (UID: \"7582561b-f4c4-4695-8173-2dc6a168d119\") " pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.181848 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7582561b-f4c4-4695-8173-2dc6a168d119-operator-scripts\") pod \"placement-fa4a-account-create-update-rv5r5\" (UID: \"7582561b-f4c4-4695-8173-2dc6a168d119\") " pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.182600 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1344a6a7-87f7-4e67-abfd-16d32643169b-operator-scripts\") pod 
\"placement-db-create-f5r7f\" (UID: \"1344a6a7-87f7-4e67-abfd-16d32643169b\") " pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.199974 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq6rl\" (UniqueName: \"kubernetes.io/projected/1344a6a7-87f7-4e67-abfd-16d32643169b-kube-api-access-rq6rl\") pod \"placement-db-create-f5r7f\" (UID: \"1344a6a7-87f7-4e67-abfd-16d32643169b\") " pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.225523 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-576sj"] Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.226936 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-576sj" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.235039 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-576sj"] Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.283709 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l85rr\" (UniqueName: \"kubernetes.io/projected/7582561b-f4c4-4695-8173-2dc6a168d119-kube-api-access-l85rr\") pod \"placement-fa4a-account-create-update-rv5r5\" (UID: \"7582561b-f4c4-4695-8173-2dc6a168d119\") " pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.283940 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7582561b-f4c4-4695-8173-2dc6a168d119-operator-scripts\") pod \"placement-fa4a-account-create-update-rv5r5\" (UID: \"7582561b-f4c4-4695-8173-2dc6a168d119\") " pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.286003 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7582561b-f4c4-4695-8173-2dc6a168d119-operator-scripts\") pod \"placement-fa4a-account-create-update-rv5r5\" (UID: \"7582561b-f4c4-4695-8173-2dc6a168d119\") " pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.305380 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l85rr\" (UniqueName: \"kubernetes.io/projected/7582561b-f4c4-4695-8173-2dc6a168d119-kube-api-access-l85rr\") pod \"placement-fa4a-account-create-update-rv5r5\" (UID: \"7582561b-f4c4-4695-8173-2dc6a168d119\") " pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.316608 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.324468 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"55cb8ef9-3722-41ab-8655-ccb1508619fd","Type":"ContainerStarted","Data":"987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951"} Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.324799 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.334384 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-5e23-account-create-update-nkzsd"] Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.336193 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.338898 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.340874 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5e23-account-create-update-nkzsd"] Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.346335 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.352461 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.796697537 podStartE2EDuration="1m5.352437378s" podCreationTimestamp="2025-12-03 05:57:51 +0000 UTC" firstStartedPulling="2025-12-03 05:57:53.524865849 +0000 UTC m=+997.460326690" lastFinishedPulling="2025-12-03 05:58:21.08060569 +0000 UTC m=+1025.016066531" observedRunningTime="2025-12-03 05:58:56.346361388 +0000 UTC m=+1060.281822229" watchObservedRunningTime="2025-12-03 05:58:56.352437378 +0000 UTC m=+1060.287898219" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.386081 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h784s\" (UniqueName: \"kubernetes.io/projected/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-kube-api-access-h784s\") pod \"glance-db-create-576sj\" (UID: \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\") " pod="openstack/glance-db-create-576sj" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.386189 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-operator-scripts\") pod \"glance-db-create-576sj\" (UID: \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\") " pod="openstack/glance-db-create-576sj" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.430231 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-qbv8v"] Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.489699 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h784s\" (UniqueName: \"kubernetes.io/projected/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-kube-api-access-h784s\") pod \"glance-db-create-576sj\" (UID: \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\") " pod="openstack/glance-db-create-576sj" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.489775 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5796a4ae-d33d-479d-89d3-a23c76988ffa-operator-scripts\") pod \"glance-5e23-account-create-update-nkzsd\" (UID: \"5796a4ae-d33d-479d-89d3-a23c76988ffa\") " pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.489852 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-operator-scripts\") pod \"glance-db-create-576sj\" (UID: \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\") " pod="openstack/glance-db-create-576sj" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.489934 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4dfj\" (UniqueName: \"kubernetes.io/projected/5796a4ae-d33d-479d-89d3-a23c76988ffa-kube-api-access-q4dfj\") pod \"glance-5e23-account-create-update-nkzsd\" (UID: \"5796a4ae-d33d-479d-89d3-a23c76988ffa\") " pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.495228 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-operator-scripts\") pod \"glance-db-create-576sj\" (UID: \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\") " pod="openstack/glance-db-create-576sj" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.514759 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h784s\" (UniqueName: \"kubernetes.io/projected/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-kube-api-access-h784s\") pod \"glance-db-create-576sj\" (UID: \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\") " pod="openstack/glance-db-create-576sj" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.516498 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1d40-account-create-update-88mwj"] Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.541967 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4x8tn" podUID="0df96f16-d193-4ecc-a624-e721c61a42af" containerName="ovn-controller" probeResult="failure" output=< Dec 03 05:58:56 crc kubenswrapper[4810]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 05:58:56 crc kubenswrapper[4810]: > Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.548262 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-576sj" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.591384 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4dfj\" (UniqueName: \"kubernetes.io/projected/5796a4ae-d33d-479d-89d3-a23c76988ffa-kube-api-access-q4dfj\") pod \"glance-5e23-account-create-update-nkzsd\" (UID: \"5796a4ae-d33d-479d-89d3-a23c76988ffa\") " pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.591539 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5796a4ae-d33d-479d-89d3-a23c76988ffa-operator-scripts\") pod \"glance-5e23-account-create-update-nkzsd\" (UID: \"5796a4ae-d33d-479d-89d3-a23c76988ffa\") " pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.592505 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5796a4ae-d33d-479d-89d3-a23c76988ffa-operator-scripts\") pod \"glance-5e23-account-create-update-nkzsd\" (UID: \"5796a4ae-d33d-479d-89d3-a23c76988ffa\") " pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.613671 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4dfj\" (UniqueName: \"kubernetes.io/projected/5796a4ae-d33d-479d-89d3-a23c76988ffa-kube-api-access-q4dfj\") pod \"glance-5e23-account-create-update-nkzsd\" (UID: \"5796a4ae-d33d-479d-89d3-a23c76988ffa\") " pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.643322 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-f5r7f"] Dec 03 05:58:56 crc kubenswrapper[4810]: W1203 05:58:56.653904 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1344a6a7_87f7_4e67_abfd_16d32643169b.slice/crio-6b08cd60188b11232af3ff3f2fdf733ecaa653d563586066ad53f634a6324321 WatchSource:0}: Error finding container 6b08cd60188b11232af3ff3f2fdf733ecaa653d563586066ad53f634a6324321: Status 404 returned error can't find the container with id 6b08cd60188b11232af3ff3f2fdf733ecaa653d563586066ad53f634a6324321 Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.732650 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:56 crc kubenswrapper[4810]: I1203 05:58:56.919464 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-fa4a-account-create-update-rv5r5"] Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.023046 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.045237 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-576sj"] Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.229035 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5e23-account-create-update-nkzsd"] Dec 03 05:58:57 crc kubenswrapper[4810]: W1203 05:58:57.231081 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5796a4ae_d33d_479d_89d3_a23c76988ffa.slice/crio-2330d53a4979b2cf67f1becf28df65d4a7d23735a0f131e3daad267d5bd32fd5 WatchSource:0}: Error finding container 2330d53a4979b2cf67f1becf28df65d4a7d23735a0f131e3daad267d5bd32fd5: Status 404 returned error can't find the container with id 2330d53a4979b2cf67f1becf28df65d4a7d23735a0f131e3daad267d5bd32fd5 Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.336492 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fa4a-account-create-update-rv5r5" event={"ID":"7582561b-f4c4-4695-8173-2dc6a168d119","Type":"ContainerStarted","Data":"30f05921d643db8f8496f5f79f09818910aedf90f3eaf057164509c70b8ac31d"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.336545 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fa4a-account-create-update-rv5r5" event={"ID":"7582561b-f4c4-4695-8173-2dc6a168d119","Type":"ContainerStarted","Data":"04dadcbabc455c08f48984fa7859a239461648367553f0d03fe1da7878d32c3b"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.342399 4810 generic.go:334] "Generic (PLEG): container finished" podID="92911999-e22a-4a27-ab41-8fe265136906" containerID="6c19beacf8ffa1a80ffeb23c82e1b2849c01c91c6d1f14f24e9823ceba8ac4a5" exitCode=0 Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.342448 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1d40-account-create-update-88mwj" event={"ID":"92911999-e22a-4a27-ab41-8fe265136906","Type":"ContainerDied","Data":"6c19beacf8ffa1a80ffeb23c82e1b2849c01c91c6d1f14f24e9823ceba8ac4a5"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.342545 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1d40-account-create-update-88mwj" event={"ID":"92911999-e22a-4a27-ab41-8fe265136906","Type":"ContainerStarted","Data":"15223c5aab5611a8b6543f79fd24aff7b0ac62e3a1532e03841ab48cc2b2914d"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.375287 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-576sj" event={"ID":"ee6fdc90-8159-4a54-8758-74a9fdcd83f7","Type":"ContainerStarted","Data":"ddd9e693285d5135ceccf43ad4406d38f2ca56fba08ca7bc3a7bc0e805349e6d"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.375392 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-576sj" event={"ID":"ee6fdc90-8159-4a54-8758-74a9fdcd83f7","Type":"ContainerStarted","Data":"babe70e5a05192e5c0fd5594f1e580607c2793c5cce6a5b60e7bf51477f42a97"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.387560 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e23-account-create-update-nkzsd" event={"ID":"5796a4ae-d33d-479d-89d3-a23c76988ffa","Type":"ContainerStarted","Data":"2330d53a4979b2cf67f1becf28df65d4a7d23735a0f131e3daad267d5bd32fd5"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.388693 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-fa4a-account-create-update-rv5r5" podStartSLOduration=1.388513144 podStartE2EDuration="1.388513144s" podCreationTimestamp="2025-12-03 05:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:58:57.367594953 +0000 UTC m=+1061.303055794" watchObservedRunningTime="2025-12-03 05:58:57.388513144 +0000 UTC m=+1061.323973975" Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.389484 4810 generic.go:334] "Generic (PLEG): container finished" podID="1a713ace-ef43-4698-9691-e961d0cf1b48" containerID="7bf074001fa81733ac1008f1545da347c8d746b94dd3f2e3b231f5ebfa756a12" exitCode=0 Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.389562 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-qbv8v" event={"ID":"1a713ace-ef43-4698-9691-e961d0cf1b48","Type":"ContainerDied","Data":"7bf074001fa81733ac1008f1545da347c8d746b94dd3f2e3b231f5ebfa756a12"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.389599 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-qbv8v" event={"ID":"1a713ace-ef43-4698-9691-e961d0cf1b48","Type":"ContainerStarted","Data":"e2269c5b7983bb418139095c43cd559216cf36fa61282c8b4b4b162c4492ba7b"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.396923 4810 generic.go:334] "Generic (PLEG): container finished" podID="1344a6a7-87f7-4e67-abfd-16d32643169b" containerID="1e3e75cecd55f1f40db1ab1f4e8a8491d6d21c0e6d027671d830acf24a4d5b4f" exitCode=0 Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.397045 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-f5r7f" event={"ID":"1344a6a7-87f7-4e67-abfd-16d32643169b","Type":"ContainerDied","Data":"1e3e75cecd55f1f40db1ab1f4e8a8491d6d21c0e6d027671d830acf24a4d5b4f"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.397088 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-f5r7f" event={"ID":"1344a6a7-87f7-4e67-abfd-16d32643169b","Type":"ContainerStarted","Data":"6b08cd60188b11232af3ff3f2fdf733ecaa653d563586066ad53f634a6324321"} Dec 03 05:58:57 crc kubenswrapper[4810]: I1203 05:58:57.398566 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-576sj" podStartSLOduration=1.398543568 podStartE2EDuration="1.398543568s" podCreationTimestamp="2025-12-03 05:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:58:57.396587596 +0000 UTC m=+1061.332048437" watchObservedRunningTime="2025-12-03 05:58:57.398543568 +0000 UTC m=+1061.334004409" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.409909 4810 generic.go:334] "Generic (PLEG): container finished" podID="7582561b-f4c4-4695-8173-2dc6a168d119" containerID="30f05921d643db8f8496f5f79f09818910aedf90f3eaf057164509c70b8ac31d" exitCode=0 Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.410344 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-fa4a-account-create-update-rv5r5" event={"ID":"7582561b-f4c4-4695-8173-2dc6a168d119","Type":"ContainerDied","Data":"30f05921d643db8f8496f5f79f09818910aedf90f3eaf057164509c70b8ac31d"} Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.412394 4810 generic.go:334] "Generic (PLEG): container finished" podID="ee6fdc90-8159-4a54-8758-74a9fdcd83f7" containerID="ddd9e693285d5135ceccf43ad4406d38f2ca56fba08ca7bc3a7bc0e805349e6d" exitCode=0 Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.412471 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-576sj" event={"ID":"ee6fdc90-8159-4a54-8758-74a9fdcd83f7","Type":"ContainerDied","Data":"ddd9e693285d5135ceccf43ad4406d38f2ca56fba08ca7bc3a7bc0e805349e6d"} Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.414159 4810 generic.go:334] "Generic (PLEG): container finished" podID="5796a4ae-d33d-479d-89d3-a23c76988ffa" containerID="ead0de28b374408e2839b099760e73945919ffc25dd29b2bdec3b43697d03f6d" exitCode=0 Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.414259 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e23-account-create-update-nkzsd" event={"ID":"5796a4ae-d33d-479d-89d3-a23c76988ffa","Type":"ContainerDied","Data":"ead0de28b374408e2839b099760e73945919ffc25dd29b2bdec3b43697d03f6d"} Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.739647 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.818072 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.829145 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1344a6a7-87f7-4e67-abfd-16d32643169b-operator-scripts\") pod \"1344a6a7-87f7-4e67-abfd-16d32643169b\" (UID: \"1344a6a7-87f7-4e67-abfd-16d32643169b\") " Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.829280 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq6rl\" (UniqueName: \"kubernetes.io/projected/1344a6a7-87f7-4e67-abfd-16d32643169b-kube-api-access-rq6rl\") pod \"1344a6a7-87f7-4e67-abfd-16d32643169b\" (UID: \"1344a6a7-87f7-4e67-abfd-16d32643169b\") " Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.829842 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1344a6a7-87f7-4e67-abfd-16d32643169b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1344a6a7-87f7-4e67-abfd-16d32643169b" (UID: "1344a6a7-87f7-4e67-abfd-16d32643169b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.830169 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1344a6a7-87f7-4e67-abfd-16d32643169b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.836233 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1344a6a7-87f7-4e67-abfd-16d32643169b-kube-api-access-rq6rl" (OuterVolumeSpecName: "kube-api-access-rq6rl") pod "1344a6a7-87f7-4e67-abfd-16d32643169b" (UID: "1344a6a7-87f7-4e67-abfd-16d32643169b"). 
InnerVolumeSpecName "kube-api-access-rq6rl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.880013 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.930800 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a713ace-ef43-4698-9691-e961d0cf1b48-operator-scripts\") pod \"1a713ace-ef43-4698-9691-e961d0cf1b48\" (UID: \"1a713ace-ef43-4698-9691-e961d0cf1b48\") " Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.930868 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92911999-e22a-4a27-ab41-8fe265136906-operator-scripts\") pod \"92911999-e22a-4a27-ab41-8fe265136906\" (UID: \"92911999-e22a-4a27-ab41-8fe265136906\") " Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.930893 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw9c7\" (UniqueName: \"kubernetes.io/projected/1a713ace-ef43-4698-9691-e961d0cf1b48-kube-api-access-nw9c7\") pod \"1a713ace-ef43-4698-9691-e961d0cf1b48\" (UID: \"1a713ace-ef43-4698-9691-e961d0cf1b48\") " Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.930996 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ph88\" (UniqueName: \"kubernetes.io/projected/92911999-e22a-4a27-ab41-8fe265136906-kube-api-access-9ph88\") pod \"92911999-e22a-4a27-ab41-8fe265136906\" (UID: \"92911999-e22a-4a27-ab41-8fe265136906\") " Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.931348 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq6rl\" (UniqueName: \"kubernetes.io/projected/1344a6a7-87f7-4e67-abfd-16d32643169b-kube-api-access-rq6rl\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.932401 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92911999-e22a-4a27-ab41-8fe265136906-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "92911999-e22a-4a27-ab41-8fe265136906" (UID: "92911999-e22a-4a27-ab41-8fe265136906"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.932486 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a713ace-ef43-4698-9691-e961d0cf1b48-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1a713ace-ef43-4698-9691-e961d0cf1b48" (UID: "1a713ace-ef43-4698-9691-e961d0cf1b48"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.936405 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a713ace-ef43-4698-9691-e961d0cf1b48-kube-api-access-nw9c7" (OuterVolumeSpecName: "kube-api-access-nw9c7") pod "1a713ace-ef43-4698-9691-e961d0cf1b48" (UID: "1a713ace-ef43-4698-9691-e961d0cf1b48"). InnerVolumeSpecName "kube-api-access-nw9c7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:58 crc kubenswrapper[4810]: I1203 05:58:58.937067 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92911999-e22a-4a27-ab41-8fe265136906-kube-api-access-9ph88" (OuterVolumeSpecName: "kube-api-access-9ph88") pod "92911999-e22a-4a27-ab41-8fe265136906" (UID: "92911999-e22a-4a27-ab41-8fe265136906"). InnerVolumeSpecName "kube-api-access-9ph88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.033888 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92911999-e22a-4a27-ab41-8fe265136906-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.034277 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw9c7\" (UniqueName: \"kubernetes.io/projected/1a713ace-ef43-4698-9691-e961d0cf1b48-kube-api-access-nw9c7\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.034299 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ph88\" (UniqueName: \"kubernetes.io/projected/92911999-e22a-4a27-ab41-8fe265136906-kube-api-access-9ph88\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.034318 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a713ace-ef43-4698-9691-e961d0cf1b48-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.427349 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1d40-account-create-update-88mwj" event={"ID":"92911999-e22a-4a27-ab41-8fe265136906","Type":"ContainerDied","Data":"15223c5aab5611a8b6543f79fd24aff7b0ac62e3a1532e03841ab48cc2b2914d"} Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.427412 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15223c5aab5611a8b6543f79fd24aff7b0ac62e3a1532e03841ab48cc2b2914d" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.427446 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1d40-account-create-update-88mwj" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.429505 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-f5r7f" event={"ID":"1344a6a7-87f7-4e67-abfd-16d32643169b","Type":"ContainerDied","Data":"6b08cd60188b11232af3ff3f2fdf733ecaa653d563586066ad53f634a6324321"} Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.429604 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b08cd60188b11232af3ff3f2fdf733ecaa653d563586066ad53f634a6324321" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.429880 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-f5r7f" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.431360 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-qbv8v" event={"ID":"1a713ace-ef43-4698-9691-e961d0cf1b48","Type":"ContainerDied","Data":"e2269c5b7983bb418139095c43cd559216cf36fa61282c8b4b4b162c4492ba7b"} Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.431392 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2269c5b7983bb418139095c43cd559216cf36fa61282c8b4b4b162c4492ba7b" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.431652 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-qbv8v" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.807382 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.853078 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4dfj\" (UniqueName: \"kubernetes.io/projected/5796a4ae-d33d-479d-89d3-a23c76988ffa-kube-api-access-q4dfj\") pod \"5796a4ae-d33d-479d-89d3-a23c76988ffa\" (UID: \"5796a4ae-d33d-479d-89d3-a23c76988ffa\") " Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.853228 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5796a4ae-d33d-479d-89d3-a23c76988ffa-operator-scripts\") pod \"5796a4ae-d33d-479d-89d3-a23c76988ffa\" (UID: \"5796a4ae-d33d-479d-89d3-a23c76988ffa\") " Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.853749 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5796a4ae-d33d-479d-89d3-a23c76988ffa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5796a4ae-d33d-479d-89d3-a23c76988ffa" (UID: "5796a4ae-d33d-479d-89d3-a23c76988ffa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.854002 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5796a4ae-d33d-479d-89d3-a23c76988ffa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.863606 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5796a4ae-d33d-479d-89d3-a23c76988ffa-kube-api-access-q4dfj" (OuterVolumeSpecName: "kube-api-access-q4dfj") pod "5796a4ae-d33d-479d-89d3-a23c76988ffa" (UID: "5796a4ae-d33d-479d-89d3-a23c76988ffa"). InnerVolumeSpecName "kube-api-access-q4dfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.933194 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-576sj" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.938870 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:58:59 crc kubenswrapper[4810]: I1203 05:58:59.959993 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4dfj\" (UniqueName: \"kubernetes.io/projected/5796a4ae-d33d-479d-89d3-a23c76988ffa-kube-api-access-q4dfj\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.061608 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-operator-scripts\") pod \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\" (UID: \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\") " Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.061699 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h784s\" (UniqueName: \"kubernetes.io/projected/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-kube-api-access-h784s\") pod \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\" (UID: \"ee6fdc90-8159-4a54-8758-74a9fdcd83f7\") " Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.061838 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7582561b-f4c4-4695-8173-2dc6a168d119-operator-scripts\") pod \"7582561b-f4c4-4695-8173-2dc6a168d119\" (UID: \"7582561b-f4c4-4695-8173-2dc6a168d119\") " Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.061918 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l85rr\" (UniqueName: \"kubernetes.io/projected/7582561b-f4c4-4695-8173-2dc6a168d119-kube-api-access-l85rr\") pod \"7582561b-f4c4-4695-8173-2dc6a168d119\" (UID: \"7582561b-f4c4-4695-8173-2dc6a168d119\") " Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.062662 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7582561b-f4c4-4695-8173-2dc6a168d119-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7582561b-f4c4-4695-8173-2dc6a168d119" (UID: "7582561b-f4c4-4695-8173-2dc6a168d119"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.062692 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ee6fdc90-8159-4a54-8758-74a9fdcd83f7" (UID: "ee6fdc90-8159-4a54-8758-74a9fdcd83f7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.067579 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-kube-api-access-h784s" (OuterVolumeSpecName: "kube-api-access-h784s") pod "ee6fdc90-8159-4a54-8758-74a9fdcd83f7" (UID: "ee6fdc90-8159-4a54-8758-74a9fdcd83f7"). InnerVolumeSpecName "kube-api-access-h784s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.067682 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7582561b-f4c4-4695-8173-2dc6a168d119-kube-api-access-l85rr" (OuterVolumeSpecName: "kube-api-access-l85rr") pod "7582561b-f4c4-4695-8173-2dc6a168d119" (UID: "7582561b-f4c4-4695-8173-2dc6a168d119"). 
InnerVolumeSpecName "kube-api-access-l85rr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.164544 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.165120 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h784s\" (UniqueName: \"kubernetes.io/projected/ee6fdc90-8159-4a54-8758-74a9fdcd83f7-kube-api-access-h784s\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.165145 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7582561b-f4c4-4695-8173-2dc6a168d119-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.165165 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l85rr\" (UniqueName: \"kubernetes.io/projected/7582561b-f4c4-4695-8173-2dc6a168d119-kube-api-access-l85rr\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.479310 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e23-account-create-update-nkzsd" event={"ID":"5796a4ae-d33d-479d-89d3-a23c76988ffa","Type":"ContainerDied","Data":"2330d53a4979b2cf67f1becf28df65d4a7d23735a0f131e3daad267d5bd32fd5"} Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.479433 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2330d53a4979b2cf67f1becf28df65d4a7d23735a0f131e3daad267d5bd32fd5" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.479800 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5e23-account-create-update-nkzsd" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.481338 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fa4a-account-create-update-rv5r5" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.481382 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fa4a-account-create-update-rv5r5" event={"ID":"7582561b-f4c4-4695-8173-2dc6a168d119","Type":"ContainerDied","Data":"04dadcbabc455c08f48984fa7859a239461648367553f0d03fe1da7878d32c3b"} Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.481431 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04dadcbabc455c08f48984fa7859a239461648367553f0d03fe1da7878d32c3b" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.483982 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-576sj" event={"ID":"ee6fdc90-8159-4a54-8758-74a9fdcd83f7","Type":"ContainerDied","Data":"babe70e5a05192e5c0fd5594f1e580607c2793c5cce6a5b60e7bf51477f42a97"} Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.484021 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="babe70e5a05192e5c0fd5594f1e580607c2793c5cce6a5b60e7bf51477f42a97" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.484068 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-576sj" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.574418 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.581243 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/229a32b8-4f61-4370-afc5-a5d2ddaf1dc8-etc-swift\") pod \"swift-storage-0\" (UID: \"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8\") " pod="openstack/swift-storage-0" Dec 03 05:59:00 crc kubenswrapper[4810]: I1203 05:59:00.855022 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.428517 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 05:59:01 crc kubenswrapper[4810]: W1203 05:59:01.433762 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod229a32b8_4f61_4370_afc5_a5d2ddaf1dc8.slice/crio-acf8ed152790f574a94ab58feec10cdde839e70dadcaa7d8566e85b65c765560 WatchSource:0}: Error finding container acf8ed152790f574a94ab58feec10cdde839e70dadcaa7d8566e85b65c765560: Status 404 returned error can't find the container with id acf8ed152790f574a94ab58feec10cdde839e70dadcaa7d8566e85b65c765560 Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.493375 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"acf8ed152790f574a94ab58feec10cdde839e70dadcaa7d8566e85b65c765560"} Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.499263 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-xwrlk"] Dec 03 05:59:01 crc kubenswrapper[4810]: E1203 05:59:01.499824 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a713ace-ef43-4698-9691-e961d0cf1b48" containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.499839 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a713ace-ef43-4698-9691-e961d0cf1b48" containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: E1203 05:59:01.499851 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92911999-e22a-4a27-ab41-8fe265136906" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.499861 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="92911999-e22a-4a27-ab41-8fe265136906" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: E1203 05:59:01.499871 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5796a4ae-d33d-479d-89d3-a23c76988ffa" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.499877 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="5796a4ae-d33d-479d-89d3-a23c76988ffa" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: E1203 05:59:01.499891 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee6fdc90-8159-4a54-8758-74a9fdcd83f7" 
containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.499897 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee6fdc90-8159-4a54-8758-74a9fdcd83f7" containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: E1203 05:59:01.499913 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1344a6a7-87f7-4e67-abfd-16d32643169b" containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.499919 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1344a6a7-87f7-4e67-abfd-16d32643169b" containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: E1203 05:59:01.499934 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7582561b-f4c4-4695-8173-2dc6a168d119" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.499940 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7582561b-f4c4-4695-8173-2dc6a168d119" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.500159 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="7582561b-f4c4-4695-8173-2dc6a168d119" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.500178 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="5796a4ae-d33d-479d-89d3-a23c76988ffa" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.500189 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="92911999-e22a-4a27-ab41-8fe265136906" containerName="mariadb-account-create-update" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.500202 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="1344a6a7-87f7-4e67-abfd-16d32643169b" containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.500213 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee6fdc90-8159-4a54-8758-74a9fdcd83f7" containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.500224 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a713ace-ef43-4698-9691-e961d0cf1b48" containerName="mariadb-database-create" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.500946 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.504466 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.504516 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-qfwpn" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.510292 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-xwrlk"] Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.552439 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4x8tn" podUID="0df96f16-d193-4ecc-a624-e721c61a42af" containerName="ovn-controller" probeResult="failure" output=< Dec 03 05:59:01 crc kubenswrapper[4810]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 05:59:01 crc kubenswrapper[4810]: > Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.594025 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-config-data\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.594552 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-db-sync-config-data\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.594590 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2pz8\" (UniqueName: \"kubernetes.io/projected/dd194c59-21ca-4b1f-b269-a2844d332781-kube-api-access-k2pz8\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.594620 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-combined-ca-bundle\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.596033 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.598285 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-qd85b" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.703358 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-config-data\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.703477 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-db-sync-config-data\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.704834 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2pz8\" (UniqueName: \"kubernetes.io/projected/dd194c59-21ca-4b1f-b269-a2844d332781-kube-api-access-k2pz8\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.704868 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-combined-ca-bundle\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.712686 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-config-data\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.721668 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-combined-ca-bundle\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.722247 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-db-sync-config-data\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.735087 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2pz8\" (UniqueName: \"kubernetes.io/projected/dd194c59-21ca-4b1f-b269-a2844d332781-kube-api-access-k2pz8\") pod \"glance-db-sync-xwrlk\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.837567 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-xwrlk" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.857336 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4x8tn-config-w6c65"] Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.858730 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.861591 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.882326 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4x8tn-config-w6c65"] Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.909939 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-scripts\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.910013 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.910164 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4czb\" (UniqueName: \"kubernetes.io/projected/07ded08e-84f8-428f-9ce7-c945a035f0a0-kube-api-access-g4czb\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.910227 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-additional-scripts\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.910315 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-log-ovn\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:01 crc kubenswrapper[4810]: I1203 05:59:01.910407 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run-ovn\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.011758 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4czb\" (UniqueName: \"kubernetes.io/projected/07ded08e-84f8-428f-9ce7-c945a035f0a0-kube-api-access-g4czb\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.011815 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-additional-scripts\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.012572 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-log-ovn\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.012931 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-log-ovn\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.013093 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run-ovn\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.013226 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-scripts\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.013315 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.013478 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.013516 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run-ovn\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.015071 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-additional-scripts\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.015606 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-scripts\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.043486 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4czb\" (UniqueName: \"kubernetes.io/projected/07ded08e-84f8-428f-9ce7-c945a035f0a0-kube-api-access-g4czb\") pod \"ovn-controller-4x8tn-config-w6c65\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.263295 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:02 crc kubenswrapper[4810]: I1203 05:59:02.454565 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-xwrlk"] Dec 03 05:59:03 crc kubenswrapper[4810]: W1203 05:59:03.271095 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd194c59_21ca_4b1f_b269_a2844d332781.slice/crio-d0807cb9eb2692610d3cd23fafcea6ac5c684dc807f6689f32302edf8dbe9784 WatchSource:0}: Error finding container d0807cb9eb2692610d3cd23fafcea6ac5c684dc807f6689f32302edf8dbe9784: Status 404 returned error can't find the container with id d0807cb9eb2692610d3cd23fafcea6ac5c684dc807f6689f32302edf8dbe9784 Dec 03 05:59:03 crc kubenswrapper[4810]: I1203 05:59:03.514415 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-xwrlk" event={"ID":"dd194c59-21ca-4b1f-b269-a2844d332781","Type":"ContainerStarted","Data":"d0807cb9eb2692610d3cd23fafcea6ac5c684dc807f6689f32302edf8dbe9784"} Dec 03 05:59:03 crc kubenswrapper[4810]: I1203 05:59:03.702299 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4x8tn-config-w6c65"] Dec 03 05:59:03 crc kubenswrapper[4810]: W1203 05:59:03.706895 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07ded08e_84f8_428f_9ce7_c945a035f0a0.slice/crio-4a9bdfb35741052d3dfbd395c6cc1ac47da99514207e0773a73b73c1be97573b WatchSource:0}: Error finding container 4a9bdfb35741052d3dfbd395c6cc1ac47da99514207e0773a73b73c1be97573b: Status 404 returned error can't find the container with id 4a9bdfb35741052d3dfbd395c6cc1ac47da99514207e0773a73b73c1be97573b Dec 03 05:59:04 crc kubenswrapper[4810]: I1203 05:59:04.525195 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4x8tn-config-w6c65" event={"ID":"07ded08e-84f8-428f-9ce7-c945a035f0a0","Type":"ContainerDied","Data":"0cf4e796041a12e4801958c73c1a93695154b39b2b775d5ab887ad209515f7f9"} Dec 03 05:59:04 crc kubenswrapper[4810]: I1203 05:59:04.526357 4810 generic.go:334] "Generic (PLEG): container finished" podID="07ded08e-84f8-428f-9ce7-c945a035f0a0" containerID="0cf4e796041a12e4801958c73c1a93695154b39b2b775d5ab887ad209515f7f9" exitCode=0 Dec 03 05:59:04 crc kubenswrapper[4810]: I1203 05:59:04.526468 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4x8tn-config-w6c65" event={"ID":"07ded08e-84f8-428f-9ce7-c945a035f0a0","Type":"ContainerStarted","Data":"4a9bdfb35741052d3dfbd395c6cc1ac47da99514207e0773a73b73c1be97573b"} Dec 03 05:59:04 crc kubenswrapper[4810]: I1203 05:59:04.537720 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"9f8cdd33cbfffd573d9b9307ade5a853790f33373569aaf9b8f55fdb6f8ea140"} Dec 03 05:59:04 crc kubenswrapper[4810]: I1203 05:59:04.537922 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"e15e049ba9e47f9ff1960e0c392852ed89410c50119ec45978944a333879ea29"} Dec 03 05:59:04 crc kubenswrapper[4810]: I1203 05:59:04.537943 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"120c56d062e4f6ef4b3b80af5b26d9df2c14799fe658bd8c88042ee753f03fbd"} Dec 03 05:59:04 crc kubenswrapper[4810]: I1203 05:59:04.537957 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"1004c7e1a98bbf7e77722f98ce53b1e77a5d77a8f4d319ba9fdbbe62499f3c69"} Dec 03 05:59:05 crc kubenswrapper[4810]: I1203 05:59:05.557115 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"59ba28f7040a03c825e2dd28312fbcbb322b936981fad46cb2d1c0bc29cb0886"} Dec 03 05:59:05 crc kubenswrapper[4810]: I1203 05:59:05.557472 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"621beb6cb59a38c626f30cb4c13951c5c40b17e4a629980179d94d4871661430"} Dec 03 05:59:05 crc kubenswrapper[4810]: I1203 05:59:05.557482 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"5dfd190b3971562a21784b0961da878a15a501b439915c7b7e02153408b7aa21"} Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.029002 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.198777 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run\") pod \"07ded08e-84f8-428f-9ce7-c945a035f0a0\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.198873 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4czb\" (UniqueName: \"kubernetes.io/projected/07ded08e-84f8-428f-9ce7-c945a035f0a0-kube-api-access-g4czb\") pod \"07ded08e-84f8-428f-9ce7-c945a035f0a0\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.198922 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run-ovn\") pod \"07ded08e-84f8-428f-9ce7-c945a035f0a0\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.198975 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-scripts\") pod \"07ded08e-84f8-428f-9ce7-c945a035f0a0\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.199054 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-log-ovn\") pod \"07ded08e-84f8-428f-9ce7-c945a035f0a0\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.199006 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run" (OuterVolumeSpecName: "var-run") pod "07ded08e-84f8-428f-9ce7-c945a035f0a0" (UID: "07ded08e-84f8-428f-9ce7-c945a035f0a0"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.199151 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "07ded08e-84f8-428f-9ce7-c945a035f0a0" (UID: "07ded08e-84f8-428f-9ce7-c945a035f0a0"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.199160 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-additional-scripts\") pod \"07ded08e-84f8-428f-9ce7-c945a035f0a0\" (UID: \"07ded08e-84f8-428f-9ce7-c945a035f0a0\") " Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.199682 4810 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.199706 4810 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.199854 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "07ded08e-84f8-428f-9ce7-c945a035f0a0" (UID: "07ded08e-84f8-428f-9ce7-c945a035f0a0"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.200394 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "07ded08e-84f8-428f-9ce7-c945a035f0a0" (UID: "07ded08e-84f8-428f-9ce7-c945a035f0a0"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.200622 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-scripts" (OuterVolumeSpecName: "scripts") pod "07ded08e-84f8-428f-9ce7-c945a035f0a0" (UID: "07ded08e-84f8-428f-9ce7-c945a035f0a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.220475 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07ded08e-84f8-428f-9ce7-c945a035f0a0-kube-api-access-g4czb" (OuterVolumeSpecName: "kube-api-access-g4czb") pod "07ded08e-84f8-428f-9ce7-c945a035f0a0" (UID: "07ded08e-84f8-428f-9ce7-c945a035f0a0"). InnerVolumeSpecName "kube-api-access-g4czb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.301566 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.301603 4810 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/07ded08e-84f8-428f-9ce7-c945a035f0a0-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.301615 4810 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/07ded08e-84f8-428f-9ce7-c945a035f0a0-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.301625 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4czb\" (UniqueName: \"kubernetes.io/projected/07ded08e-84f8-428f-9ce7-c945a035f0a0-kube-api-access-g4czb\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.569833 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-4x8tn" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.571462 4810 generic.go:334] "Generic (PLEG): container finished" podID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" containerID="e4949b492e4394fad97a05af5ecd0f3270a715f053d57898e6bca3965a05913d" exitCode=0 Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.571617 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"43dbe69c-d6b8-4773-ad88-79c3c975afdf","Type":"ContainerDied","Data":"e4949b492e4394fad97a05af5ecd0f3270a715f053d57898e6bca3965a05913d"} Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.573891 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4x8tn-config-w6c65" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.573918 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4x8tn-config-w6c65" event={"ID":"07ded08e-84f8-428f-9ce7-c945a035f0a0","Type":"ContainerDied","Data":"4a9bdfb35741052d3dfbd395c6cc1ac47da99514207e0773a73b73c1be97573b"} Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.573964 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a9bdfb35741052d3dfbd395c6cc1ac47da99514207e0773a73b73c1be97573b" Dec 03 05:59:06 crc kubenswrapper[4810]: I1203 05:59:06.579299 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"49dcd8ea336e63d98bdded43e44017d64482c27caf90cad838e4c2c99ef6fed7"} Dec 03 05:59:07 crc kubenswrapper[4810]: I1203 05:59:07.174414 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4x8tn-config-w6c65"] Dec 03 05:59:07 crc kubenswrapper[4810]: I1203 05:59:07.183407 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4x8tn-config-w6c65"] Dec 03 05:59:07 crc kubenswrapper[4810]: I1203 05:59:07.611814 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"0a01f4da912e4700292a77797bf87fdedae904ea61d1543f3f81b9677dd197f9"} Dec 03 05:59:07 crc kubenswrapper[4810]: I1203 05:59:07.611893 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"7fbb67d44f8d02609ee14ae29b8be2630efc316ce1fffcee5cf650ba8c11a6e9"} Dec 03 05:59:07 crc kubenswrapper[4810]: I1203 05:59:07.614555 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"43dbe69c-d6b8-4773-ad88-79c3c975afdf","Type":"ContainerStarted","Data":"9323cdf4ddf6943307fa9a3dbccc919bc72405af9b884aa7b014b3c2b6faabef"} Dec 03 05:59:07 crc kubenswrapper[4810]: I1203 05:59:07.614846 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 05:59:07 crc kubenswrapper[4810]: I1203 05:59:07.638903 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371960.215897 podStartE2EDuration="1m16.638879648s" podCreationTimestamp="2025-12-03 05:57:51 +0000 UTC" firstStartedPulling="2025-12-03 05:57:52.966019109 +0000 UTC m=+996.901479950" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:59:07.638765485 +0000 UTC m=+1071.574226346" watchObservedRunningTime="2025-12-03 05:59:07.638879648 +0000 UTC m=+1071.574340489" Dec 03 05:59:08 crc kubenswrapper[4810]: I1203 05:59:08.397607 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07ded08e-84f8-428f-9ce7-c945a035f0a0" path="/var/lib/kubelet/pods/07ded08e-84f8-428f-9ce7-c945a035f0a0/volumes" Dec 03 05:59:08 crc kubenswrapper[4810]: I1203 05:59:08.649772 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"3fea2c38696bd1b3b2eae7737efaf8557ae8442e772160d7c95337eb4c31eda3"} Dec 03 05:59:08 crc kubenswrapper[4810]: I1203 05:59:08.649859 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"6e8529c4e5d644cd4db380207697288fc23b9178f25583cdf8e572ef7427af02"} Dec 03 05:59:09 crc kubenswrapper[4810]: I1203 05:59:09.669884 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"54b61cfd23cd19d8bdba497a32063d21baa0c6408cf69c702dc40d1b26468059"} Dec 03 05:59:09 crc kubenswrapper[4810]: I1203 05:59:09.670259 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"f7cecfbe9742f72ca40b6c1974cffafe9132b9620032660df16e0c23709c2412"} Dec 03 05:59:09 crc kubenswrapper[4810]: I1203 05:59:09.670279 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"229a32b8-4f61-4370-afc5-a5d2ddaf1dc8","Type":"ContainerStarted","Data":"a2d637a6f068200f80caf7a051401c3516220e80ddd0a27a86e262a940de78e8"} Dec 03 05:59:09 crc kubenswrapper[4810]: I1203 05:59:09.716938 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=37.352555544 podStartE2EDuration="42.716914845s" podCreationTimestamp="2025-12-03 05:58:27 +0000 UTC" firstStartedPulling="2025-12-03 05:59:01.436648827 +0000 UTC m=+1065.372109658" lastFinishedPulling="2025-12-03 05:59:06.801008118 +0000 UTC m=+1070.736468959" observedRunningTime="2025-12-03 05:59:09.709630364 +0000 UTC m=+1073.645091205" watchObservedRunningTime="2025-12-03 05:59:09.716914845 +0000 UTC m=+1073.652375686" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.019123 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d9fb694c-bn6dt"] Dec 03 05:59:10 crc kubenswrapper[4810]: E1203 05:59:10.019474 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07ded08e-84f8-428f-9ce7-c945a035f0a0" containerName="ovn-config" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.019492 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="07ded08e-84f8-428f-9ce7-c945a035f0a0" containerName="ovn-config" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.019728 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="07ded08e-84f8-428f-9ce7-c945a035f0a0" containerName="ovn-config" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.020625 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.022981 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.052369 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d9fb694c-bn6dt"] Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.182905 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-svc\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.182954 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-swift-storage-0\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.182989 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-sb\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.183037 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-config\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.183067 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-nb\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.183085 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5865z\" (UniqueName: \"kubernetes.io/projected/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-kube-api-access-5865z\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.285077 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-svc\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.285641 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-swift-storage-0\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: 
\"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.285900 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-sb\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.286185 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-config\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.286384 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-nb\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.286563 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5865z\" (UniqueName: \"kubernetes.io/projected/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-kube-api-access-5865z\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.288712 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-svc\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.288984 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-sb\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.290131 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-nb\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.289961 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-config\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.289846 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-swift-storage-0\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc 
kubenswrapper[4810]: I1203 05:59:10.307239 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5865z\" (UniqueName: \"kubernetes.io/projected/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-kube-api-access-5865z\") pod \"dnsmasq-dns-59d9fb694c-bn6dt\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.338248 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:10 crc kubenswrapper[4810]: W1203 05:59:10.912260 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4c9f70d_39bd_470d_8b34_97acb0fd2aa9.slice/crio-895436c300df4beb0f77d4abd7e7995ef737ce991957d94df6d34329aba66c87 WatchSource:0}: Error finding container 895436c300df4beb0f77d4abd7e7995ef737ce991957d94df6d34329aba66c87: Status 404 returned error can't find the container with id 895436c300df4beb0f77d4abd7e7995ef737ce991957d94df6d34329aba66c87 Dec 03 05:59:10 crc kubenswrapper[4810]: I1203 05:59:10.922817 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d9fb694c-bn6dt"] Dec 03 05:59:11 crc kubenswrapper[4810]: I1203 05:59:11.692646 4810 generic.go:334] "Generic (PLEG): container finished" podID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerID="71f3b7d8640c16b4770ef34879152df98c7f6f063a9b44667b8c103203ceba59" exitCode=0 Dec 03 05:59:11 crc kubenswrapper[4810]: I1203 05:59:11.692710 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" event={"ID":"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9","Type":"ContainerDied","Data":"71f3b7d8640c16b4770ef34879152df98c7f6f063a9b44667b8c103203ceba59"} Dec 03 05:59:11 crc kubenswrapper[4810]: I1203 05:59:11.693056 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" event={"ID":"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9","Type":"ContainerStarted","Data":"895436c300df4beb0f77d4abd7e7995ef737ce991957d94df6d34329aba66c87"} Dec 03 05:59:12 crc kubenswrapper[4810]: I1203 05:59:12.703587 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" event={"ID":"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9","Type":"ContainerStarted","Data":"5ceb661a2921b2c170abfdf567e43fa400463e4e929af28a6d3744e953cbf609"} Dec 03 05:59:12 crc kubenswrapper[4810]: I1203 05:59:12.704000 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:12 crc kubenswrapper[4810]: I1203 05:59:12.849475 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 05:59:12 crc kubenswrapper[4810]: I1203 05:59:12.887995 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" podStartSLOduration=3.887966456 podStartE2EDuration="3.887966456s" podCreationTimestamp="2025-12-03 05:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:59:12.740211908 +0000 UTC m=+1076.675672749" watchObservedRunningTime="2025-12-03 05:59:12.887966456 +0000 UTC m=+1076.823427297" Dec 03 05:59:20 crc kubenswrapper[4810]: I1203 05:59:20.339952 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:20 crc kubenswrapper[4810]: I1203 05:59:20.404359 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b8d9f7b57-c7z68"] Dec 03 05:59:20 crc kubenswrapper[4810]: I1203 05:59:20.404805 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" podUID="27ca79d5-c933-4b20-9f26-9885671647bf" containerName="dnsmasq-dns" containerID="cri-o://bb4e3e2186bf3011210cdcd3aac68a5e99b75c98eca650faeda19817289dcdbc" gracePeriod=10 Dec 03 05:59:20 crc kubenswrapper[4810]: I1203 05:59:20.832364 4810 generic.go:334] "Generic (PLEG): container finished" podID="27ca79d5-c933-4b20-9f26-9885671647bf" containerID="bb4e3e2186bf3011210cdcd3aac68a5e99b75c98eca650faeda19817289dcdbc" exitCode=0 Dec 03 05:59:20 crc kubenswrapper[4810]: I1203 05:59:20.832415 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" event={"ID":"27ca79d5-c933-4b20-9f26-9885671647bf","Type":"ContainerDied","Data":"bb4e3e2186bf3011210cdcd3aac68a5e99b75c98eca650faeda19817289dcdbc"} Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.467985 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.781123 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-p57j4"] Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.782282 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.797469 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-p57j4"] Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.877191 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-2d53-account-create-update-49k7c"] Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.885351 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.891739 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2d53-account-create-update-49k7c"] Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.893333 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.937049 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln7gp\" (UniqueName: \"kubernetes.io/projected/091611a7-14c1-42e6-9f3b-1984d9cb31b3-kube-api-access-ln7gp\") pod \"barbican-db-create-p57j4\" (UID: \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\") " pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.937130 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/091611a7-14c1-42e6-9f3b-1984d9cb31b3-operator-scripts\") pod \"barbican-db-create-p57j4\" (UID: \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\") " pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.959766 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-xzmmt"] Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.961060 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.968260 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-xzmmt"] Dec 03 05:59:22 crc kubenswrapper[4810]: I1203 05:59:22.990208 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" podUID="27ca79d5-c933-4b20-9f26-9885671647bf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: connect: connection refused" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.038627 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24a4433-214f-4641-a1c4-ec8a35f420ed-operator-scripts\") pod \"cinder-db-create-xzmmt\" (UID: \"d24a4433-214f-4641-a1c4-ec8a35f420ed\") " pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.038691 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln7gp\" (UniqueName: \"kubernetes.io/projected/091611a7-14c1-42e6-9f3b-1984d9cb31b3-kube-api-access-ln7gp\") pod \"barbican-db-create-p57j4\" (UID: \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\") " pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.038745 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5gcz\" (UniqueName: \"kubernetes.io/projected/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-kube-api-access-c5gcz\") pod \"barbican-2d53-account-create-update-49k7c\" (UID: \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\") " pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.038786 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/091611a7-14c1-42e6-9f3b-1984d9cb31b3-operator-scripts\") pod \"barbican-db-create-p57j4\" (UID: \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\") " pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.038809 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-operator-scripts\") pod \"barbican-2d53-account-create-update-49k7c\" (UID: \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\") " pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.038869 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhpzg\" (UniqueName: \"kubernetes.io/projected/d24a4433-214f-4641-a1c4-ec8a35f420ed-kube-api-access-mhpzg\") pod \"cinder-db-create-xzmmt\" (UID: \"d24a4433-214f-4641-a1c4-ec8a35f420ed\") " pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.039648 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/091611a7-14c1-42e6-9f3b-1984d9cb31b3-operator-scripts\") pod \"barbican-db-create-p57j4\" (UID: \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\") " pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.058804 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-900e-account-create-update-rvcrl"] Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.061639 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.064763 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.078566 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln7gp\" (UniqueName: \"kubernetes.io/projected/091611a7-14c1-42e6-9f3b-1984d9cb31b3-kube-api-access-ln7gp\") pod \"barbican-db-create-p57j4\" (UID: \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\") " pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.082995 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-900e-account-create-update-rvcrl"] Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.121997 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.130566 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-87m89"] Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.131691 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.136261 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.136524 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.136641 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.136788 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-xzf75" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.149210 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr7ct\" (UniqueName: \"kubernetes.io/projected/964efc7e-8dae-494a-903a-7208635ff931-kube-api-access-cr7ct\") pod \"cinder-900e-account-create-update-rvcrl\" (UID: \"964efc7e-8dae-494a-903a-7208635ff931\") " pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.149277 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-operator-scripts\") pod \"barbican-2d53-account-create-update-49k7c\" (UID: \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\") " pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.149313 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/964efc7e-8dae-494a-903a-7208635ff931-operator-scripts\") pod \"cinder-900e-account-create-update-rvcrl\" (UID: \"964efc7e-8dae-494a-903a-7208635ff931\") " pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.149385 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhpzg\" (UniqueName: \"kubernetes.io/projected/d24a4433-214f-4641-a1c4-ec8a35f420ed-kube-api-access-mhpzg\") pod \"cinder-db-create-xzmmt\" (UID: \"d24a4433-214f-4641-a1c4-ec8a35f420ed\") " pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.149583 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24a4433-214f-4641-a1c4-ec8a35f420ed-operator-scripts\") pod \"cinder-db-create-xzmmt\" (UID: \"d24a4433-214f-4641-a1c4-ec8a35f420ed\") " pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.149652 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5gcz\" (UniqueName: \"kubernetes.io/projected/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-kube-api-access-c5gcz\") pod \"barbican-2d53-account-create-update-49k7c\" (UID: \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\") " pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.154682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-operator-scripts\") pod \"barbican-2d53-account-create-update-49k7c\" (UID: 
\"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\") " pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.157666 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-87m89"] Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.159937 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24a4433-214f-4641-a1c4-ec8a35f420ed-operator-scripts\") pod \"cinder-db-create-xzmmt\" (UID: \"d24a4433-214f-4641-a1c4-ec8a35f420ed\") " pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.176930 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhpzg\" (UniqueName: \"kubernetes.io/projected/d24a4433-214f-4641-a1c4-ec8a35f420ed-kube-api-access-mhpzg\") pod \"cinder-db-create-xzmmt\" (UID: \"d24a4433-214f-4641-a1c4-ec8a35f420ed\") " pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.187672 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5gcz\" (UniqueName: \"kubernetes.io/projected/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-kube-api-access-c5gcz\") pod \"barbican-2d53-account-create-update-49k7c\" (UID: \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\") " pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.199161 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-hl4nn"] Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.200393 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.206135 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.215411 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-hl4nn"] Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.251175 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-config-data\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.251327 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr7ct\" (UniqueName: \"kubernetes.io/projected/964efc7e-8dae-494a-903a-7208635ff931-kube-api-access-cr7ct\") pod \"cinder-900e-account-create-update-rvcrl\" (UID: \"964efc7e-8dae-494a-903a-7208635ff931\") " pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.251379 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96mj6\" (UniqueName: \"kubernetes.io/projected/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-kube-api-access-96mj6\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.251410 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-combined-ca-bundle\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.251437 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/964efc7e-8dae-494a-903a-7208635ff931-operator-scripts\") pod \"cinder-900e-account-create-update-rvcrl\" (UID: \"964efc7e-8dae-494a-903a-7208635ff931\") " pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.252250 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/964efc7e-8dae-494a-903a-7208635ff931-operator-scripts\") pod \"cinder-900e-account-create-update-rvcrl\" (UID: \"964efc7e-8dae-494a-903a-7208635ff931\") " pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.276405 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr7ct\" (UniqueName: \"kubernetes.io/projected/964efc7e-8dae-494a-903a-7208635ff931-kube-api-access-cr7ct\") pod \"cinder-900e-account-create-update-rvcrl\" (UID: \"964efc7e-8dae-494a-903a-7208635ff931\") " pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.283537 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.349856 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-9e2e-account-create-update-gzghr"] Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.352782 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-operator-scripts\") pod \"neutron-db-create-hl4nn\" (UID: \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\") " pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.353131 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96mj6\" (UniqueName: \"kubernetes.io/projected/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-kube-api-access-96mj6\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.353221 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-combined-ca-bundle\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.353225 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.353365 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbpgv\" (UniqueName: \"kubernetes.io/projected/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-kube-api-access-cbpgv\") pod \"neutron-db-create-hl4nn\" (UID: \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\") " pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.353422 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-config-data\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.362146 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.362531 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-combined-ca-bundle\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.362830 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-config-data\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.373987 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9e2e-account-create-update-gzghr"] Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.394435 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96mj6\" (UniqueName: \"kubernetes.io/projected/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-kube-api-access-96mj6\") pod \"keystone-db-sync-87m89\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.449561 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.454965 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbpgv\" (UniqueName: \"kubernetes.io/projected/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-kube-api-access-cbpgv\") pod \"neutron-db-create-hl4nn\" (UID: \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\") " pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.455291 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-operator-scripts\") pod \"neutron-db-create-hl4nn\" (UID: \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\") " pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.455474 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-operator-scripts\") pod \"neutron-9e2e-account-create-update-gzghr\" (UID: \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\") " pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.455549 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj8tn\" (UniqueName: \"kubernetes.io/projected/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-kube-api-access-sj8tn\") pod \"neutron-9e2e-account-create-update-gzghr\" (UID: \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\") " pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.456084 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-operator-scripts\") pod \"neutron-db-create-hl4nn\" (UID: \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\") " pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.472253 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbpgv\" (UniqueName: \"kubernetes.io/projected/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-kube-api-access-cbpgv\") pod \"neutron-db-create-hl4nn\" (UID: \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\") " pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.547149 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.557775 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-operator-scripts\") pod \"neutron-9e2e-account-create-update-gzghr\" (UID: \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\") " pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.557856 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj8tn\" (UniqueName: \"kubernetes.io/projected/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-kube-api-access-sj8tn\") pod \"neutron-9e2e-account-create-update-gzghr\" (UID: \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\") " pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.558641 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-operator-scripts\") pod \"neutron-9e2e-account-create-update-gzghr\" (UID: \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\") " pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.566230 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.589626 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj8tn\" (UniqueName: \"kubernetes.io/projected/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-kube-api-access-sj8tn\") pod \"neutron-9e2e-account-create-update-gzghr\" (UID: \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\") " pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:23 crc kubenswrapper[4810]: I1203 05:59:23.710058 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:24 crc kubenswrapper[4810]: E1203 05:59:24.213841 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-glance-api:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:59:24 crc kubenswrapper[4810]: E1203 05:59:24.213938 4810 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-glance-api:2e38c527ddf6e767040136ecf014e7b9" Dec 03 05:59:24 crc kubenswrapper[4810]: E1203 05:59:24.214131 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.rdoproject.org/podified-master-centos9/openstack-glance-api:2e38c527ddf6e767040136ecf014e7b9,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k2pz8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-xwrlk_openstack(dd194c59-21ca-4b1f-b269-a2844d332781): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 05:59:24 crc kubenswrapper[4810]: E1203 05:59:24.215815 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-xwrlk" podUID="dd194c59-21ca-4b1f-b269-a2844d332781" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.626502 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.754762 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4g7h\" (UniqueName: \"kubernetes.io/projected/27ca79d5-c933-4b20-9f26-9885671647bf-kube-api-access-z4g7h\") pod \"27ca79d5-c933-4b20-9f26-9885671647bf\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.755119 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-config\") pod \"27ca79d5-c933-4b20-9f26-9885671647bf\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.755164 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-sb\") pod \"27ca79d5-c933-4b20-9f26-9885671647bf\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.755258 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-nb\") pod \"27ca79d5-c933-4b20-9f26-9885671647bf\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.755304 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-dns-svc\") pod \"27ca79d5-c933-4b20-9f26-9885671647bf\" (UID: \"27ca79d5-c933-4b20-9f26-9885671647bf\") " Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.770092 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27ca79d5-c933-4b20-9f26-9885671647bf-kube-api-access-z4g7h" (OuterVolumeSpecName: "kube-api-access-z4g7h") pod "27ca79d5-c933-4b20-9f26-9885671647bf" (UID: "27ca79d5-c933-4b20-9f26-9885671647bf"). InnerVolumeSpecName "kube-api-access-z4g7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.810813 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9e2e-account-create-update-gzghr"] Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.828642 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "27ca79d5-c933-4b20-9f26-9885671647bf" (UID: "27ca79d5-c933-4b20-9f26-9885671647bf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.834116 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-config" (OuterVolumeSpecName: "config") pod "27ca79d5-c933-4b20-9f26-9885671647bf" (UID: "27ca79d5-c933-4b20-9f26-9885671647bf"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.837745 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "27ca79d5-c933-4b20-9f26-9885671647bf" (UID: "27ca79d5-c933-4b20-9f26-9885671647bf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.846627 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "27ca79d5-c933-4b20-9f26-9885671647bf" (UID: "27ca79d5-c933-4b20-9f26-9885671647bf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.856835 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.856862 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4g7h\" (UniqueName: \"kubernetes.io/projected/27ca79d5-c933-4b20-9f26-9885671647bf-kube-api-access-z4g7h\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.856891 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.856902 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.856911 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27ca79d5-c933-4b20-9f26-9885671647bf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.863879 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9e2e-account-create-update-gzghr" event={"ID":"f7a03dec-82f1-44d6-b7b2-700cc7df3cac","Type":"ContainerStarted","Data":"137a14ca47930ddb086fe5c08168f7510d3d8d6f381599752d1202baf265b7d8"} Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.868486 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" event={"ID":"27ca79d5-c933-4b20-9f26-9885671647bf","Type":"ContainerDied","Data":"0092808f9f5fa558d5d7beacf1f50ad1a960c2778101ae6b94ba5da5da34cb64"} Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.868531 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b8d9f7b57-c7z68" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.868545 4810 scope.go:117] "RemoveContainer" containerID="bb4e3e2186bf3011210cdcd3aac68a5e99b75c98eca650faeda19817289dcdbc" Dec 03 05:59:24 crc kubenswrapper[4810]: E1203 05:59:24.869564 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-glance-api:2e38c527ddf6e767040136ecf014e7b9\\\"\"" pod="openstack/glance-db-sync-xwrlk" podUID="dd194c59-21ca-4b1f-b269-a2844d332781" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.907836 4810 scope.go:117] "RemoveContainer" containerID="c264160a788ec84b59147de5a590533a756373d3d39419c80a8a46e8eb9aec35" Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.937490 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b8d9f7b57-c7z68"] Dec 03 05:59:24 crc kubenswrapper[4810]: I1203 05:59:24.944497 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b8d9f7b57-c7z68"] Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.094585 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-hl4nn"] Dec 03 05:59:25 crc kubenswrapper[4810]: W1203 05:59:25.105000 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08cc50ca_8fbc_48c0_8a9d_11baf452f56b.slice/crio-82486e4fb9088173de54e4caf77fd8f56d55e09914c1734e6021228cd8a4180b WatchSource:0}: Error finding container 82486e4fb9088173de54e4caf77fd8f56d55e09914c1734e6021228cd8a4180b: Status 404 returned error can't find the container with id 82486e4fb9088173de54e4caf77fd8f56d55e09914c1734e6021228cd8a4180b Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.183265 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-xzmmt"] Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.194788 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-900e-account-create-update-rvcrl"] Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.202848 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-87m89"] Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.209817 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-p57j4"] Dec 03 05:59:25 crc kubenswrapper[4810]: W1203 05:59:25.214514 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f10c1fe_10b9_49e1_ae18_5bd77dda7932.slice/crio-f0a20bc49255da7888c12d2a4124ffe8a103360a2c0bf6c830fd5aec64abb455 WatchSource:0}: Error finding container f0a20bc49255da7888c12d2a4124ffe8a103360a2c0bf6c830fd5aec64abb455: Status 404 returned error can't find the container with id f0a20bc49255da7888c12d2a4124ffe8a103360a2c0bf6c830fd5aec64abb455 Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.223564 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2d53-account-create-update-49k7c"] Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.879889 4810 generic.go:334] "Generic (PLEG): container finished" podID="08cc50ca-8fbc-48c0-8a9d-11baf452f56b" containerID="2f8b0a1f681ad6855266a27fe17d32b9a83bb08ab6edc4c9801af6211f538a5f" exitCode=0 Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 
05:59:25.880022 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hl4nn" event={"ID":"08cc50ca-8fbc-48c0-8a9d-11baf452f56b","Type":"ContainerDied","Data":"2f8b0a1f681ad6855266a27fe17d32b9a83bb08ab6edc4c9801af6211f538a5f"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.880087 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hl4nn" event={"ID":"08cc50ca-8fbc-48c0-8a9d-11baf452f56b","Type":"ContainerStarted","Data":"82486e4fb9088173de54e4caf77fd8f56d55e09914c1734e6021228cd8a4180b"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.882338 4810 generic.go:334] "Generic (PLEG): container finished" podID="091611a7-14c1-42e6-9f3b-1984d9cb31b3" containerID="9e9485aeee115ea4f2390600c5b9dcca925e30ae7f6616f8cc5a137643dba197" exitCode=0 Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.882398 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-p57j4" event={"ID":"091611a7-14c1-42e6-9f3b-1984d9cb31b3","Type":"ContainerDied","Data":"9e9485aeee115ea4f2390600c5b9dcca925e30ae7f6616f8cc5a137643dba197"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.882420 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-p57j4" event={"ID":"091611a7-14c1-42e6-9f3b-1984d9cb31b3","Type":"ContainerStarted","Data":"680681d7f84c22aab4361139b7c2e0c5cbe39ba31ac8e7197caa713d6779b04c"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.885416 4810 generic.go:334] "Generic (PLEG): container finished" podID="20db688f-f2bb-4ca4-b5b3-638a3f9a2e57" containerID="83f723c58e8ca4a6b565e340bec7fc076201d79050429253fcd11b5fd05b6440" exitCode=0 Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.885526 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2d53-account-create-update-49k7c" event={"ID":"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57","Type":"ContainerDied","Data":"83f723c58e8ca4a6b565e340bec7fc076201d79050429253fcd11b5fd05b6440"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.885545 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2d53-account-create-update-49k7c" event={"ID":"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57","Type":"ContainerStarted","Data":"2265f6d9b63006e1f87219ce3f2e567069f5c430caf03a46439ac77e088830e0"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.887499 4810 generic.go:334] "Generic (PLEG): container finished" podID="f7a03dec-82f1-44d6-b7b2-700cc7df3cac" containerID="9d9a18de7c81871d5c433d54075dfe9ecdec8cf7a4b22b8fa5e4eb54d3bb4a65" exitCode=0 Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.887552 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9e2e-account-create-update-gzghr" event={"ID":"f7a03dec-82f1-44d6-b7b2-700cc7df3cac","Type":"ContainerDied","Data":"9d9a18de7c81871d5c433d54075dfe9ecdec8cf7a4b22b8fa5e4eb54d3bb4a65"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.889144 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-87m89" event={"ID":"9f10c1fe-10b9-49e1-ae18-5bd77dda7932","Type":"ContainerStarted","Data":"f0a20bc49255da7888c12d2a4124ffe8a103360a2c0bf6c830fd5aec64abb455"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.894613 4810 generic.go:334] "Generic (PLEG): container finished" podID="d24a4433-214f-4641-a1c4-ec8a35f420ed" containerID="28df5dc97eb2771b9c89c2c2dd7df831bcd227e840198762d64285bed96490e8" exitCode=0 Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 
05:59:25.894682 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-xzmmt" event={"ID":"d24a4433-214f-4641-a1c4-ec8a35f420ed","Type":"ContainerDied","Data":"28df5dc97eb2771b9c89c2c2dd7df831bcd227e840198762d64285bed96490e8"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.894708 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-xzmmt" event={"ID":"d24a4433-214f-4641-a1c4-ec8a35f420ed","Type":"ContainerStarted","Data":"e4876a261490275915edfbb61d0b6858dc554437dce39d7428a269e15e03334f"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.897335 4810 generic.go:334] "Generic (PLEG): container finished" podID="964efc7e-8dae-494a-903a-7208635ff931" containerID="e7c6f1e0c4789979ea78445cd91ba458541652def5e88bb86fd95a66a4ae21b0" exitCode=0 Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.897466 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-900e-account-create-update-rvcrl" event={"ID":"964efc7e-8dae-494a-903a-7208635ff931","Type":"ContainerDied","Data":"e7c6f1e0c4789979ea78445cd91ba458541652def5e88bb86fd95a66a4ae21b0"} Dec 03 05:59:25 crc kubenswrapper[4810]: I1203 05:59:25.897620 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-900e-account-create-update-rvcrl" event={"ID":"964efc7e-8dae-494a-903a-7208635ff931","Type":"ContainerStarted","Data":"9c6357717c18091fcdf36f1c73af52fd18fc381bd1e48553f5358ad36c383767"} Dec 03 05:59:26 crc kubenswrapper[4810]: I1203 05:59:26.389409 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27ca79d5-c933-4b20-9f26-9885671647bf" path="/var/lib/kubelet/pods/27ca79d5-c933-4b20-9f26-9885671647bf/volumes" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.480564 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.486887 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.492568 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.501021 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.515784 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbpgv\" (UniqueName: \"kubernetes.io/projected/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-kube-api-access-cbpgv\") pod \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\" (UID: \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.515920 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/091611a7-14c1-42e6-9f3b-1984d9cb31b3-operator-scripts\") pod \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\" (UID: \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.515965 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-operator-scripts\") pod \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\" (UID: \"08cc50ca-8fbc-48c0-8a9d-11baf452f56b\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.515993 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-operator-scripts\") pod \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\" (UID: \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.516031 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhpzg\" (UniqueName: \"kubernetes.io/projected/d24a4433-214f-4641-a1c4-ec8a35f420ed-kube-api-access-mhpzg\") pod \"d24a4433-214f-4641-a1c4-ec8a35f420ed\" (UID: \"d24a4433-214f-4641-a1c4-ec8a35f420ed\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.516085 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24a4433-214f-4641-a1c4-ec8a35f420ed-operator-scripts\") pod \"d24a4433-214f-4641-a1c4-ec8a35f420ed\" (UID: \"d24a4433-214f-4641-a1c4-ec8a35f420ed\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.516113 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj8tn\" (UniqueName: \"kubernetes.io/projected/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-kube-api-access-sj8tn\") pod \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\" (UID: \"f7a03dec-82f1-44d6-b7b2-700cc7df3cac\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.516142 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln7gp\" (UniqueName: \"kubernetes.io/projected/091611a7-14c1-42e6-9f3b-1984d9cb31b3-kube-api-access-ln7gp\") pod \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\" (UID: \"091611a7-14c1-42e6-9f3b-1984d9cb31b3\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.516846 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/091611a7-14c1-42e6-9f3b-1984d9cb31b3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "091611a7-14c1-42e6-9f3b-1984d9cb31b3" (UID: "091611a7-14c1-42e6-9f3b-1984d9cb31b3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.517995 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f7a03dec-82f1-44d6-b7b2-700cc7df3cac" (UID: "f7a03dec-82f1-44d6-b7b2-700cc7df3cac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.518497 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "08cc50ca-8fbc-48c0-8a9d-11baf452f56b" (UID: "08cc50ca-8fbc-48c0-8a9d-11baf452f56b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.525159 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-kube-api-access-cbpgv" (OuterVolumeSpecName: "kube-api-access-cbpgv") pod "08cc50ca-8fbc-48c0-8a9d-11baf452f56b" (UID: "08cc50ca-8fbc-48c0-8a9d-11baf452f56b"). InnerVolumeSpecName "kube-api-access-cbpgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.526996 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-kube-api-access-sj8tn" (OuterVolumeSpecName: "kube-api-access-sj8tn") pod "f7a03dec-82f1-44d6-b7b2-700cc7df3cac" (UID: "f7a03dec-82f1-44d6-b7b2-700cc7df3cac"). InnerVolumeSpecName "kube-api-access-sj8tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.527072 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/091611a7-14c1-42e6-9f3b-1984d9cb31b3-kube-api-access-ln7gp" (OuterVolumeSpecName: "kube-api-access-ln7gp") pod "091611a7-14c1-42e6-9f3b-1984d9cb31b3" (UID: "091611a7-14c1-42e6-9f3b-1984d9cb31b3"). InnerVolumeSpecName "kube-api-access-ln7gp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.529609 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d24a4433-214f-4641-a1c4-ec8a35f420ed-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d24a4433-214f-4641-a1c4-ec8a35f420ed" (UID: "d24a4433-214f-4641-a1c4-ec8a35f420ed"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.531947 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.539890 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d24a4433-214f-4641-a1c4-ec8a35f420ed-kube-api-access-mhpzg" (OuterVolumeSpecName: "kube-api-access-mhpzg") pod "d24a4433-214f-4641-a1c4-ec8a35f420ed" (UID: "d24a4433-214f-4641-a1c4-ec8a35f420ed"). InnerVolumeSpecName "kube-api-access-mhpzg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.554186 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.617815 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5gcz\" (UniqueName: \"kubernetes.io/projected/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-kube-api-access-c5gcz\") pod \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\" (UID: \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618074 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-operator-scripts\") pod \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\" (UID: \"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618128 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr7ct\" (UniqueName: \"kubernetes.io/projected/964efc7e-8dae-494a-903a-7208635ff931-kube-api-access-cr7ct\") pod \"964efc7e-8dae-494a-903a-7208635ff931\" (UID: \"964efc7e-8dae-494a-903a-7208635ff931\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618148 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/964efc7e-8dae-494a-903a-7208635ff931-operator-scripts\") pod \"964efc7e-8dae-494a-903a-7208635ff931\" (UID: \"964efc7e-8dae-494a-903a-7208635ff931\") " Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618655 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhpzg\" (UniqueName: \"kubernetes.io/projected/d24a4433-214f-4641-a1c4-ec8a35f420ed-kube-api-access-mhpzg\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618678 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24a4433-214f-4641-a1c4-ec8a35f420ed-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618689 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj8tn\" (UniqueName: \"kubernetes.io/projected/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-kube-api-access-sj8tn\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618701 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln7gp\" (UniqueName: \"kubernetes.io/projected/091611a7-14c1-42e6-9f3b-1984d9cb31b3-kube-api-access-ln7gp\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618712 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbpgv\" (UniqueName: \"kubernetes.io/projected/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-kube-api-access-cbpgv\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618722 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/091611a7-14c1-42e6-9f3b-1984d9cb31b3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618744 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/08cc50ca-8fbc-48c0-8a9d-11baf452f56b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618754 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a03dec-82f1-44d6-b7b2-700cc7df3cac-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.618911 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "20db688f-f2bb-4ca4-b5b3-638a3f9a2e57" (UID: "20db688f-f2bb-4ca4-b5b3-638a3f9a2e57"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.619346 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/964efc7e-8dae-494a-903a-7208635ff931-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "964efc7e-8dae-494a-903a-7208635ff931" (UID: "964efc7e-8dae-494a-903a-7208635ff931"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.621468 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-kube-api-access-c5gcz" (OuterVolumeSpecName: "kube-api-access-c5gcz") pod "20db688f-f2bb-4ca4-b5b3-638a3f9a2e57" (UID: "20db688f-f2bb-4ca4-b5b3-638a3f9a2e57"). InnerVolumeSpecName "kube-api-access-c5gcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.625874 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/964efc7e-8dae-494a-903a-7208635ff931-kube-api-access-cr7ct" (OuterVolumeSpecName: "kube-api-access-cr7ct") pod "964efc7e-8dae-494a-903a-7208635ff931" (UID: "964efc7e-8dae-494a-903a-7208635ff931"). InnerVolumeSpecName "kube-api-access-cr7ct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.720666 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5gcz\" (UniqueName: \"kubernetes.io/projected/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-kube-api-access-c5gcz\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.720714 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.720723 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr7ct\" (UniqueName: \"kubernetes.io/projected/964efc7e-8dae-494a-903a-7208635ff931-kube-api-access-cr7ct\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.720744 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/964efc7e-8dae-494a-903a-7208635ff931-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.923893 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-p57j4" event={"ID":"091611a7-14c1-42e6-9f3b-1984d9cb31b3","Type":"ContainerDied","Data":"680681d7f84c22aab4361139b7c2e0c5cbe39ba31ac8e7197caa713d6779b04c"} Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.923942 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="680681d7f84c22aab4361139b7c2e0c5cbe39ba31ac8e7197caa713d6779b04c" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.923956 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-p57j4" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.927061 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2d53-account-create-update-49k7c" event={"ID":"20db688f-f2bb-4ca4-b5b3-638a3f9a2e57","Type":"ContainerDied","Data":"2265f6d9b63006e1f87219ce3f2e567069f5c430caf03a46439ac77e088830e0"} Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.927134 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2d53-account-create-update-49k7c" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.927151 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2265f6d9b63006e1f87219ce3f2e567069f5c430caf03a46439ac77e088830e0" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.930436 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9e2e-account-create-update-gzghr" event={"ID":"f7a03dec-82f1-44d6-b7b2-700cc7df3cac","Type":"ContainerDied","Data":"137a14ca47930ddb086fe5c08168f7510d3d8d6f381599752d1202baf265b7d8"} Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.930476 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="137a14ca47930ddb086fe5c08168f7510d3d8d6f381599752d1202baf265b7d8" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.930546 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9e2e-account-create-update-gzghr" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.936083 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-900e-account-create-update-rvcrl" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.936074 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-900e-account-create-update-rvcrl" event={"ID":"964efc7e-8dae-494a-903a-7208635ff931","Type":"ContainerDied","Data":"9c6357717c18091fcdf36f1c73af52fd18fc381bd1e48553f5358ad36c383767"} Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.936233 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c6357717c18091fcdf36f1c73af52fd18fc381bd1e48553f5358ad36c383767" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.937701 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-xzmmt" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.937711 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-xzmmt" event={"ID":"d24a4433-214f-4641-a1c4-ec8a35f420ed","Type":"ContainerDied","Data":"e4876a261490275915edfbb61d0b6858dc554437dce39d7428a269e15e03334f"} Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.937781 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4876a261490275915edfbb61d0b6858dc554437dce39d7428a269e15e03334f" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.939110 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hl4nn" event={"ID":"08cc50ca-8fbc-48c0-8a9d-11baf452f56b","Type":"ContainerDied","Data":"82486e4fb9088173de54e4caf77fd8f56d55e09914c1734e6021228cd8a4180b"} Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.939163 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82486e4fb9088173de54e4caf77fd8f56d55e09914c1734e6021228cd8a4180b" Dec 03 05:59:27 crc kubenswrapper[4810]: I1203 05:59:27.939188 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-hl4nn" Dec 03 05:59:31 crc kubenswrapper[4810]: I1203 05:59:31.991389 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-87m89" event={"ID":"9f10c1fe-10b9-49e1-ae18-5bd77dda7932","Type":"ContainerStarted","Data":"e5bb8678fcd9ddf1536907101eba0d146d71c6e9170dda6dbf817287dee6ad43"} Dec 03 05:59:32 crc kubenswrapper[4810]: I1203 05:59:32.017340 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-87m89" podStartSLOduration=3.123627662 podStartE2EDuration="9.017320254s" podCreationTimestamp="2025-12-03 05:59:23 +0000 UTC" firstStartedPulling="2025-12-03 05:59:25.224523271 +0000 UTC m=+1089.159984112" lastFinishedPulling="2025-12-03 05:59:31.118215863 +0000 UTC m=+1095.053676704" observedRunningTime="2025-12-03 05:59:32.0144726 +0000 UTC m=+1095.949933461" watchObservedRunningTime="2025-12-03 05:59:32.017320254 +0000 UTC m=+1095.952781095" Dec 03 05:59:36 crc kubenswrapper[4810]: I1203 05:59:36.028207 4810 generic.go:334] "Generic (PLEG): container finished" podID="9f10c1fe-10b9-49e1-ae18-5bd77dda7932" containerID="e5bb8678fcd9ddf1536907101eba0d146d71c6e9170dda6dbf817287dee6ad43" exitCode=0 Dec 03 05:59:36 crc kubenswrapper[4810]: I1203 05:59:36.028332 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-87m89" event={"ID":"9f10c1fe-10b9-49e1-ae18-5bd77dda7932","Type":"ContainerDied","Data":"e5bb8678fcd9ddf1536907101eba0d146d71c6e9170dda6dbf817287dee6ad43"} Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.330496 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.423238 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-config-data\") pod \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.423362 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-combined-ca-bundle\") pod \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.423440 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96mj6\" (UniqueName: \"kubernetes.io/projected/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-kube-api-access-96mj6\") pod \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\" (UID: \"9f10c1fe-10b9-49e1-ae18-5bd77dda7932\") " Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.429038 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-kube-api-access-96mj6" (OuterVolumeSpecName: "kube-api-access-96mj6") pod "9f10c1fe-10b9-49e1-ae18-5bd77dda7932" (UID: "9f10c1fe-10b9-49e1-ae18-5bd77dda7932"). InnerVolumeSpecName "kube-api-access-96mj6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.456814 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f10c1fe-10b9-49e1-ae18-5bd77dda7932" (UID: "9f10c1fe-10b9-49e1-ae18-5bd77dda7932"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.468218 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-config-data" (OuterVolumeSpecName: "config-data") pod "9f10c1fe-10b9-49e1-ae18-5bd77dda7932" (UID: "9f10c1fe-10b9-49e1-ae18-5bd77dda7932"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.525309 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.525351 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:37 crc kubenswrapper[4810]: I1203 05:59:37.525368 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96mj6\" (UniqueName: \"kubernetes.io/projected/9f10c1fe-10b9-49e1-ae18-5bd77dda7932-kube-api-access-96mj6\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.047064 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-87m89" event={"ID":"9f10c1fe-10b9-49e1-ae18-5bd77dda7932","Type":"ContainerDied","Data":"f0a20bc49255da7888c12d2a4124ffe8a103360a2c0bf6c830fd5aec64abb455"} Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.047457 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0a20bc49255da7888c12d2a4124ffe8a103360a2c0bf6c830fd5aec64abb455" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.047150 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-87m89" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.327871 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76c7bd889f-tpg7s"] Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328600 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a03dec-82f1-44d6-b7b2-700cc7df3cac" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328618 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a03dec-82f1-44d6-b7b2-700cc7df3cac" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328629 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f10c1fe-10b9-49e1-ae18-5bd77dda7932" containerName="keystone-db-sync" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328635 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f10c1fe-10b9-49e1-ae18-5bd77dda7932" containerName="keystone-db-sync" Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328653 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08cc50ca-8fbc-48c0-8a9d-11baf452f56b" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328659 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="08cc50ca-8fbc-48c0-8a9d-11baf452f56b" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328670 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="091611a7-14c1-42e6-9f3b-1984d9cb31b3" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328676 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="091611a7-14c1-42e6-9f3b-1984d9cb31b3" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328684 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ca79d5-c933-4b20-9f26-9885671647bf" containerName="init" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328690 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ca79d5-c933-4b20-9f26-9885671647bf" containerName="init" Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328700 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ca79d5-c933-4b20-9f26-9885671647bf" containerName="dnsmasq-dns" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328706 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ca79d5-c933-4b20-9f26-9885671647bf" containerName="dnsmasq-dns" Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328719 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20db688f-f2bb-4ca4-b5b3-638a3f9a2e57" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328740 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="20db688f-f2bb-4ca4-b5b3-638a3f9a2e57" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328748 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="964efc7e-8dae-494a-903a-7208635ff931" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328755 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="964efc7e-8dae-494a-903a-7208635ff931" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: E1203 05:59:38.328768 
4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d24a4433-214f-4641-a1c4-ec8a35f420ed" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328774 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d24a4433-214f-4641-a1c4-ec8a35f420ed" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328929 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="091611a7-14c1-42e6-9f3b-1984d9cb31b3" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328941 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="08cc50ca-8fbc-48c0-8a9d-11baf452f56b" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328954 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a03dec-82f1-44d6-b7b2-700cc7df3cac" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328966 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="20db688f-f2bb-4ca4-b5b3-638a3f9a2e57" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328975 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="964efc7e-8dae-494a-903a-7208635ff931" containerName="mariadb-account-create-update" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328984 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f10c1fe-10b9-49e1-ae18-5bd77dda7932" containerName="keystone-db-sync" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.328997 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d24a4433-214f-4641-a1c4-ec8a35f420ed" containerName="mariadb-database-create" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.329003 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="27ca79d5-c933-4b20-9f26-9885671647bf" containerName="dnsmasq-dns" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.329878 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.350912 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76c7bd889f-tpg7s"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.376169 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-hmms8"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.378697 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.385269 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-xzf75" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.385611 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.385814 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.386034 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.394521 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.440277 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hmms8"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.452460 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8w46\" (UniqueName: \"kubernetes.io/projected/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-kube-api-access-b8w46\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.452534 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-sb\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.452665 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-swift-storage-0\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.452692 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-svc\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.452755 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-config\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.452789 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-nb\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554062 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-credential-keys\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554119 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjgbw\" (UniqueName: \"kubernetes.io/projected/ba900065-7cd5-4983-a9d3-2d708af18fb3-kube-api-access-kjgbw\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554150 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-config\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554178 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-nb\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554235 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-fernet-keys\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554279 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8w46\" (UniqueName: \"kubernetes.io/projected/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-kube-api-access-b8w46\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554302 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-sb\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554324 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-config-data\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554354 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-scripts\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554399 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-swift-storage-0\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554423 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-svc\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.554447 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-combined-ca-bundle\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.555341 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-sb\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.555407 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-config\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.556032 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-swift-storage-0\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.556251 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-svc\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.559134 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-nb\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.559373 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.565078 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.570226 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.570484 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.576189 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.600901 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8w46\" (UniqueName: \"kubernetes.io/projected/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-kube-api-access-b8w46\") pod \"dnsmasq-dns-76c7bd889f-tpg7s\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.658890 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-credential-keys\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.658938 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.658976 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-log-httpd\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.658996 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjgbw\" (UniqueName: \"kubernetes.io/projected/ba900065-7cd5-4983-a9d3-2d708af18fb3-kube-api-access-kjgbw\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659059 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-config-data\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659081 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-fernet-keys\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659175 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtr7n\" (UniqueName: \"kubernetes.io/projected/0881496b-922a-4333-a59d-3f953bcdd31d-kube-api-access-gtr7n\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " 
pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659195 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-config-data\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659209 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659233 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-scripts\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659284 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-run-httpd\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659330 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-scripts\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.659359 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-combined-ca-bundle\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.668793 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-config-data\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.672660 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-combined-ca-bundle\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.673207 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.683544 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-fernet-keys\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.684168 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-credential-keys\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.688190 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-scripts\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.745281 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjgbw\" (UniqueName: \"kubernetes.io/projected/ba900065-7cd5-4983-a9d3-2d708af18fb3-kube-api-access-kjgbw\") pod \"keystone-bootstrap-hmms8\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.745364 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-z4zwh"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.746684 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.753140 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.753313 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wb57f" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.753470 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.757792 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-sg2m8"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.759173 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.760651 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-config-data\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.760743 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtr7n\" (UniqueName: \"kubernetes.io/projected/0881496b-922a-4333-a59d-3f953bcdd31d-kube-api-access-gtr7n\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.760767 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.760803 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-run-httpd\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.760836 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-scripts\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.760853 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.760868 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-log-httpd\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.761324 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-log-httpd\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.763600 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-bkz8h" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.764048 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.764215 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.764709 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-run-httpd\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.783236 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-sg2m8"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.790493 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-scripts\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.797421 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.804840 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-z4zwh"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.806900 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtr7n\" (UniqueName: \"kubernetes.io/projected/0881496b-922a-4333-a59d-3f953bcdd31d-kube-api-access-gtr7n\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.816075 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-config-data\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.818422 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882209 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-combined-ca-bundle\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882276 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ce16c9-937b-4bdb-b5b1-09003d013c3a-etc-machine-id\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882350 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-config-data\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882423 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-combined-ca-bundle\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882444 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-db-sync-config-data\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882490 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-config\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882511 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-scripts\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882606 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s575h\" (UniqueName: \"kubernetes.io/projected/36ce16c9-937b-4bdb-b5b1-09003d013c3a-kube-api-access-s575h\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.882687 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npkmc\" (UniqueName: \"kubernetes.io/projected/83ad186f-b9b2-43c4-8b88-8c6df56cd132-kube-api-access-npkmc\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.903158 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.940055 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76c7bd889f-tpg7s"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.975146 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-nqvx4"] Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.983777 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-combined-ca-bundle\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.983823 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ce16c9-937b-4bdb-b5b1-09003d013c3a-etc-machine-id\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.983856 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-config-data\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.983925 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-combined-ca-bundle\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.983944 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-db-sync-config-data\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.983961 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-config\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.983975 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-scripts\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.984016 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s575h\" (UniqueName: \"kubernetes.io/projected/36ce16c9-937b-4bdb-b5b1-09003d013c3a-kube-api-access-s575h\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.984052 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npkmc\" (UniqueName: 
\"kubernetes.io/projected/83ad186f-b9b2-43c4-8b88-8c6df56cd132-kube-api-access-npkmc\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.992152 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-config-data\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.992223 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ce16c9-937b-4bdb-b5b1-09003d013c3a-etc-machine-id\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.992658 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-combined-ca-bundle\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:38 crc kubenswrapper[4810]: I1203 05:59:38.999364 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-config\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.005467 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-db-sync-config-data\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.005856 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.010516 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npkmc\" (UniqueName: \"kubernetes.io/projected/83ad186f-b9b2-43c4-8b88-8c6df56cd132-kube-api-access-npkmc\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.020829 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.021706 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-cnv4h" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.024425 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.027965 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-combined-ca-bundle\") pod \"neutron-db-sync-sg2m8\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.038071 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s575h\" (UniqueName: \"kubernetes.io/projected/36ce16c9-937b-4bdb-b5b1-09003d013c3a-kube-api-access-s575h\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.050967 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-scripts\") pod \"cinder-db-sync-z4zwh\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.056468 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f45869c47-p6bn8"] Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.058150 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.085649 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-nqvx4"] Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.103970 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-xwrlk" event={"ID":"dd194c59-21ca-4b1f-b269-a2844d332781","Type":"ContainerStarted","Data":"b7b0251c52336419515a4fd46c4ce2ffe9cda305a485333a3cd4e9bff263a99b"} Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.123804 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-hvg5q"] Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.125143 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.128185 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.129296 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-6qksq" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.143531 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.146131 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-hvg5q"] Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.153605 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f45869c47-p6bn8"] Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.154272 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-z4zwh" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.184599 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-sg2m8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.189343 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-swift-storage-0\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.189510 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-nb\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.189642 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-config\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.189770 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-svc\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.189878 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln7qj\" (UniqueName: \"kubernetes.io/projected/33fa5884-f2c0-4391-a719-81c4d43605dc-kube-api-access-ln7qj\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.189998 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-db-sync-config-data\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.190091 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x84gw\" (UniqueName: \"kubernetes.io/projected/fbca557f-95c4-460b-9d6d-8cd23b748c5b-kube-api-access-x84gw\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.190186 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-sb\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.190282 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-combined-ca-bundle\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.194202 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-xwrlk" podStartSLOduration=3.906800639 podStartE2EDuration="38.194182685s" podCreationTimestamp="2025-12-03 05:59:01 +0000 UTC" firstStartedPulling="2025-12-03 05:59:03.276156716 +0000 UTC m=+1067.211617577" lastFinishedPulling="2025-12-03 05:59:37.563538762 +0000 UTC m=+1101.498999623" observedRunningTime="2025-12-03 05:59:39.128866756 +0000 UTC m=+1103.064327597" watchObservedRunningTime="2025-12-03 05:59:39.194182685 +0000 UTC m=+1103.129643526" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.294872 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-scripts\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.294958 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-swift-storage-0\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.295065 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-nb\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.295126 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-config\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.295153 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2b44955-f7f1-4819-b948-e82272f18a2b-logs\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.295199 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-svc\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.295243 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln7qj\" (UniqueName: \"kubernetes.io/projected/33fa5884-f2c0-4391-a719-81c4d43605dc-kube-api-access-ln7qj\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 
05:59:39.295304 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-db-sync-config-data\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.296852 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-swift-storage-0\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.297837 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x84gw\" (UniqueName: \"kubernetes.io/projected/fbca557f-95c4-460b-9d6d-8cd23b748c5b-kube-api-access-x84gw\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.297910 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-combined-ca-bundle\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.297953 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-sb\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.297981 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-combined-ca-bundle\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.298004 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-config-data\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.298021 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltw98\" (UniqueName: \"kubernetes.io/projected/d2b44955-f7f1-4819-b948-e82272f18a2b-kube-api-access-ltw98\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.298256 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-nb\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.307752 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-db-sync-config-data\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.308047 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-svc\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.309882 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-config\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.313192 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-combined-ca-bundle\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.315340 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-sb\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.346155 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x84gw\" (UniqueName: \"kubernetes.io/projected/fbca557f-95c4-460b-9d6d-8cd23b748c5b-kube-api-access-x84gw\") pod \"dnsmasq-dns-7f45869c47-p6bn8\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.367385 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln7qj\" (UniqueName: \"kubernetes.io/projected/33fa5884-f2c0-4391-a719-81c4d43605dc-kube-api-access-ln7qj\") pod \"barbican-db-sync-nqvx4\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.417890 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-combined-ca-bundle\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.417940 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-config-data\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.417956 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltw98\" (UniqueName: 
\"kubernetes.io/projected/d2b44955-f7f1-4819-b948-e82272f18a2b-kube-api-access-ltw98\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.417996 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-scripts\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.418068 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2b44955-f7f1-4819-b948-e82272f18a2b-logs\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.418457 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2b44955-f7f1-4819-b948-e82272f18a2b-logs\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.421210 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-nqvx4" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.422284 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.465493 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-combined-ca-bundle\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.471452 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-config-data\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.480116 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-scripts\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.486357 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltw98\" (UniqueName: \"kubernetes.io/projected/d2b44955-f7f1-4819-b948-e82272f18a2b-kube-api-access-ltw98\") pod \"placement-db-sync-hvg5q\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.627832 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76c7bd889f-tpg7s"] Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.718325 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hmms8"] Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.763322 4810 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/placement-db-sync-hvg5q" Dec 03 05:59:39 crc kubenswrapper[4810]: I1203 05:59:39.837143 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 05:59:40 crc kubenswrapper[4810]: I1203 05:59:40.860276 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 05:59:43 crc kubenswrapper[4810]: I1203 05:59:43.147259 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hmms8" event={"ID":"ba900065-7cd5-4983-a9d3-2d708af18fb3","Type":"ContainerStarted","Data":"de0046bc522b1e4509e9d8f8098879448a223b898845baa1a07247053dd218d5"} Dec 03 05:59:43 crc kubenswrapper[4810]: I1203 05:59:43.148610 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" event={"ID":"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7","Type":"ContainerStarted","Data":"c373d7404fc733be30796dcd45b25a4bbd1ecfa341dbaa1252ba7f50fe80f1be"} Dec 03 05:59:43 crc kubenswrapper[4810]: I1203 05:59:43.150321 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerStarted","Data":"657b4604baead2eb5e04ef29e2b4230c6aa08f46b83217c40c4470b3303f5875"} Dec 03 05:59:43 crc kubenswrapper[4810]: I1203 05:59:43.203536 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-z4zwh"] Dec 03 05:59:43 crc kubenswrapper[4810]: W1203 05:59:43.208017 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36ce16c9_937b_4bdb_b5b1_09003d013c3a.slice/crio-8d9452054e854722680526f55c1568028f035adec52559df7818e8f3fc4b2899 WatchSource:0}: Error finding container 8d9452054e854722680526f55c1568028f035adec52559df7818e8f3fc4b2899: Status 404 returned error can't find the container with id 8d9452054e854722680526f55c1568028f035adec52559df7818e8f3fc4b2899 Dec 03 05:59:43 crc kubenswrapper[4810]: W1203 05:59:43.365193 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbca557f_95c4_460b_9d6d_8cd23b748c5b.slice/crio-5c72ee5fe6afc8b61f6ae61d63e7cc990aee14fc3ccb175346508ddf8d7cdc30 WatchSource:0}: Error finding container 5c72ee5fe6afc8b61f6ae61d63e7cc990aee14fc3ccb175346508ddf8d7cdc30: Status 404 returned error can't find the container with id 5c72ee5fe6afc8b61f6ae61d63e7cc990aee14fc3ccb175346508ddf8d7cdc30 Dec 03 05:59:43 crc kubenswrapper[4810]: I1203 05:59:43.367720 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f45869c47-p6bn8"] Dec 03 05:59:43 crc kubenswrapper[4810]: I1203 05:59:43.455700 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-nqvx4"] Dec 03 05:59:43 crc kubenswrapper[4810]: I1203 05:59:43.469129 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-sg2m8"] Dec 03 05:59:43 crc kubenswrapper[4810]: I1203 05:59:43.479676 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-hvg5q"] Dec 03 05:59:43 crc kubenswrapper[4810]: W1203 05:59:43.491785 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33fa5884_f2c0_4391_a719_81c4d43605dc.slice/crio-67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846 WatchSource:0}: Error finding container 
67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846: Status 404 returned error can't find the container with id 67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846 Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.208790 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-z4zwh" event={"ID":"36ce16c9-937b-4bdb-b5b1-09003d013c3a","Type":"ContainerStarted","Data":"8d9452054e854722680526f55c1568028f035adec52559df7818e8f3fc4b2899"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.213249 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hvg5q" event={"ID":"d2b44955-f7f1-4819-b948-e82272f18a2b","Type":"ContainerStarted","Data":"752973820a7f07eb45853d58adb45447447886abf96e82425dff3169c94a9120"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.215791 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hmms8" event={"ID":"ba900065-7cd5-4983-a9d3-2d708af18fb3","Type":"ContainerStarted","Data":"41e10527d7ee5a5563cb77cc9e193a6aa676322a67c55a735abed2d4b8d56fcb"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.218497 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-sg2m8" event={"ID":"83ad186f-b9b2-43c4-8b88-8c6df56cd132","Type":"ContainerStarted","Data":"92ee5f26cb111ef92be899386e950ff0fb2f077896ee97e05a274d74bd87401e"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.218554 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-sg2m8" event={"ID":"83ad186f-b9b2-43c4-8b88-8c6df56cd132","Type":"ContainerStarted","Data":"3ca0901902baccacfcc4793b5e616055e5606676b9e649add0a1c11b8341bd81"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.220170 4810 generic.go:334] "Generic (PLEG): container finished" podID="6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" containerID="bb6de01dc782183f92c5f698697a06f4440f87429d3a9cda0d2aa0c49b189aa6" exitCode=0 Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.220296 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" event={"ID":"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7","Type":"ContainerDied","Data":"bb6de01dc782183f92c5f698697a06f4440f87429d3a9cda0d2aa0c49b189aa6"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.221421 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-nqvx4" event={"ID":"33fa5884-f2c0-4391-a719-81c4d43605dc","Type":"ContainerStarted","Data":"67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.223281 4810 generic.go:334] "Generic (PLEG): container finished" podID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" containerID="590cf4c70389880517781040d69583bd1f2ed1cfab67f26aba9717b0adc59daa" exitCode=0 Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.223314 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" event={"ID":"fbca557f-95c4-460b-9d6d-8cd23b748c5b","Type":"ContainerDied","Data":"590cf4c70389880517781040d69583bd1f2ed1cfab67f26aba9717b0adc59daa"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.223333 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" event={"ID":"fbca557f-95c4-460b-9d6d-8cd23b748c5b","Type":"ContainerStarted","Data":"5c72ee5fe6afc8b61f6ae61d63e7cc990aee14fc3ccb175346508ddf8d7cdc30"} Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 
05:59:44.245069 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-hmms8" podStartSLOduration=6.245042229 podStartE2EDuration="6.245042229s" podCreationTimestamp="2025-12-03 05:59:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:59:44.239879784 +0000 UTC m=+1108.175340625" watchObservedRunningTime="2025-12-03 05:59:44.245042229 +0000 UTC m=+1108.180503070" Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.357973 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-sg2m8" podStartSLOduration=6.357948653 podStartE2EDuration="6.357948653s" podCreationTimestamp="2025-12-03 05:59:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:59:44.2959189 +0000 UTC m=+1108.231379761" watchObservedRunningTime="2025-12-03 05:59:44.357948653 +0000 UTC m=+1108.293409494" Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.903436 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.967576 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-nb\") pod \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.967667 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-svc\") pod \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.967758 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-swift-storage-0\") pod \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.967835 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8w46\" (UniqueName: \"kubernetes.io/projected/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-kube-api-access-b8w46\") pod \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.967958 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-sb\") pod \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.968061 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-config\") pod \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\" (UID: \"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7\") " Dec 03 05:59:44 crc kubenswrapper[4810]: I1203 05:59:44.977122 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-kube-api-access-b8w46" (OuterVolumeSpecName: "kube-api-access-b8w46") pod "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" (UID: "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7"). InnerVolumeSpecName "kube-api-access-b8w46". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.008107 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" (UID: "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.008771 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" (UID: "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.015788 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-config" (OuterVolumeSpecName: "config") pod "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" (UID: "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.017164 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" (UID: "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.040612 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" (UID: "6d4d8ed1-63d7-420a-94f0-c0f3f42451c7"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.070408 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.070455 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.070470 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.070482 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.070496 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8w46\" (UniqueName: \"kubernetes.io/projected/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-kube-api-access-b8w46\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.070507 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.235825 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" event={"ID":"6d4d8ed1-63d7-420a-94f0-c0f3f42451c7","Type":"ContainerDied","Data":"c373d7404fc733be30796dcd45b25a4bbd1ecfa341dbaa1252ba7f50fe80f1be"} Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.235894 4810 scope.go:117] "RemoveContainer" containerID="bb6de01dc782183f92c5f698697a06f4440f87429d3a9cda0d2aa0c49b189aa6" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.236120 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76c7bd889f-tpg7s" Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.271389 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" event={"ID":"fbca557f-95c4-460b-9d6d-8cd23b748c5b","Type":"ContainerStarted","Data":"6b553e38d4f7db49c4d91fb17bfd327e691b96fb600c6f80ca38c15d963aafb3"} Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.326817 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76c7bd889f-tpg7s"] Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.339883 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76c7bd889f-tpg7s"] Dec 03 05:59:45 crc kubenswrapper[4810]: I1203 05:59:45.342209 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" podStartSLOduration=7.342188666 podStartE2EDuration="7.342188666s" podCreationTimestamp="2025-12-03 05:59:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 05:59:45.334033182 +0000 UTC m=+1109.269494033" watchObservedRunningTime="2025-12-03 05:59:45.342188666 +0000 UTC m=+1109.277649507" Dec 03 05:59:46 crc kubenswrapper[4810]: I1203 05:59:46.290722 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:46 crc kubenswrapper[4810]: I1203 05:59:46.419011 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" path="/var/lib/kubelet/pods/6d4d8ed1-63d7-420a-94f0-c0f3f42451c7/volumes" Dec 03 05:59:49 crc kubenswrapper[4810]: I1203 05:59:49.347985 4810 generic.go:334] "Generic (PLEG): container finished" podID="ba900065-7cd5-4983-a9d3-2d708af18fb3" containerID="41e10527d7ee5a5563cb77cc9e193a6aa676322a67c55a735abed2d4b8d56fcb" exitCode=0 Dec 03 05:59:49 crc kubenswrapper[4810]: I1203 05:59:49.348051 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hmms8" event={"ID":"ba900065-7cd5-4983-a9d3-2d708af18fb3","Type":"ContainerDied","Data":"41e10527d7ee5a5563cb77cc9e193a6aa676322a67c55a735abed2d4b8d56fcb"} Dec 03 05:59:49 crc kubenswrapper[4810]: I1203 05:59:49.424914 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 05:59:49 crc kubenswrapper[4810]: I1203 05:59:49.502588 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d9fb694c-bn6dt"] Dec 03 05:59:49 crc kubenswrapper[4810]: I1203 05:59:49.502907 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" podUID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerName="dnsmasq-dns" containerID="cri-o://5ceb661a2921b2c170abfdf567e43fa400463e4e929af28a6d3744e953cbf609" gracePeriod=10 Dec 03 05:59:50 crc kubenswrapper[4810]: I1203 05:59:50.339460 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" podUID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: connect: connection refused" Dec 03 05:59:50 crc kubenswrapper[4810]: I1203 05:59:50.358666 4810 generic.go:334] "Generic (PLEG): container finished" podID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerID="5ceb661a2921b2c170abfdf567e43fa400463e4e929af28a6d3744e953cbf609" 
exitCode=0 Dec 03 05:59:50 crc kubenswrapper[4810]: I1203 05:59:50.358739 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" event={"ID":"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9","Type":"ContainerDied","Data":"5ceb661a2921b2c170abfdf567e43fa400463e4e929af28a6d3744e953cbf609"} Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.814182 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.905484 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-credential-keys\") pod \"ba900065-7cd5-4983-a9d3-2d708af18fb3\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.905857 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-fernet-keys\") pod \"ba900065-7cd5-4983-a9d3-2d708af18fb3\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.905880 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-config-data\") pod \"ba900065-7cd5-4983-a9d3-2d708af18fb3\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.905925 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-combined-ca-bundle\") pod \"ba900065-7cd5-4983-a9d3-2d708af18fb3\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.905987 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjgbw\" (UniqueName: \"kubernetes.io/projected/ba900065-7cd5-4983-a9d3-2d708af18fb3-kube-api-access-kjgbw\") pod \"ba900065-7cd5-4983-a9d3-2d708af18fb3\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.906009 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-scripts\") pod \"ba900065-7cd5-4983-a9d3-2d708af18fb3\" (UID: \"ba900065-7cd5-4983-a9d3-2d708af18fb3\") " Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.913396 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ba900065-7cd5-4983-a9d3-2d708af18fb3" (UID: "ba900065-7cd5-4983-a9d3-2d708af18fb3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.916385 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ba900065-7cd5-4983-a9d3-2d708af18fb3" (UID: "ba900065-7cd5-4983-a9d3-2d708af18fb3"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.921360 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba900065-7cd5-4983-a9d3-2d708af18fb3-kube-api-access-kjgbw" (OuterVolumeSpecName: "kube-api-access-kjgbw") pod "ba900065-7cd5-4983-a9d3-2d708af18fb3" (UID: "ba900065-7cd5-4983-a9d3-2d708af18fb3"). InnerVolumeSpecName "kube-api-access-kjgbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.925374 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-scripts" (OuterVolumeSpecName: "scripts") pod "ba900065-7cd5-4983-a9d3-2d708af18fb3" (UID: "ba900065-7cd5-4983-a9d3-2d708af18fb3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:59:51 crc kubenswrapper[4810]: I1203 05:59:51.960168 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba900065-7cd5-4983-a9d3-2d708af18fb3" (UID: "ba900065-7cd5-4983-a9d3-2d708af18fb3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.004801 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-config-data" (OuterVolumeSpecName: "config-data") pod "ba900065-7cd5-4983-a9d3-2d708af18fb3" (UID: "ba900065-7cd5-4983-a9d3-2d708af18fb3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.007943 4810 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.007974 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.007987 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.008001 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjgbw\" (UniqueName: \"kubernetes.io/projected/ba900065-7cd5-4983-a9d3-2d708af18fb3-kube-api-access-kjgbw\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.008010 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.008017 4810 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba900065-7cd5-4983-a9d3-2d708af18fb3-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.382638 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hmms8" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.394612 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hmms8" event={"ID":"ba900065-7cd5-4983-a9d3-2d708af18fb3","Type":"ContainerDied","Data":"de0046bc522b1e4509e9d8f8098879448a223b898845baa1a07247053dd218d5"} Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.394661 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de0046bc522b1e4509e9d8f8098879448a223b898845baa1a07247053dd218d5" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.895369 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-hmms8"] Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.902835 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-hmms8"] Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.995248 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-mmgx4"] Dec 03 05:59:52 crc kubenswrapper[4810]: E1203 05:59:52.995720 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba900065-7cd5-4983-a9d3-2d708af18fb3" containerName="keystone-bootstrap" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.995758 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba900065-7cd5-4983-a9d3-2d708af18fb3" containerName="keystone-bootstrap" Dec 03 05:59:52 crc kubenswrapper[4810]: E1203 05:59:52.995790 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" containerName="init" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.995798 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" containerName="init" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.996027 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba900065-7cd5-4983-a9d3-2d708af18fb3" containerName="keystone-bootstrap" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.996046 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d4d8ed1-63d7-420a-94f0-c0f3f42451c7" containerName="init" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.996827 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:52 crc kubenswrapper[4810]: I1203 05:59:52.999520 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:52.999645 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.000180 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.001301 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-xzf75" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.001478 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.008162 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-mmgx4"] Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.129943 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jfcm\" (UniqueName: \"kubernetes.io/projected/4f374320-accd-46d6-a286-3f2bac0a4180-kube-api-access-7jfcm\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.130017 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-combined-ca-bundle\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.130041 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-credential-keys\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.130093 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-config-data\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.130123 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-fernet-keys\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.130580 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-scripts\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.232816 4810 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-7jfcm\" (UniqueName: \"kubernetes.io/projected/4f374320-accd-46d6-a286-3f2bac0a4180-kube-api-access-7jfcm\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.232885 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-combined-ca-bundle\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.232914 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-credential-keys\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.232972 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-config-data\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.233008 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-fernet-keys\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.233084 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-scripts\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.239343 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-scripts\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.239527 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-fernet-keys\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.239921 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-combined-ca-bundle\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.243309 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-credential-keys\") pod \"keystone-bootstrap-mmgx4\" (UID: 
\"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.243521 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-config-data\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.252194 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jfcm\" (UniqueName: \"kubernetes.io/projected/4f374320-accd-46d6-a286-3f2bac0a4180-kube-api-access-7jfcm\") pod \"keystone-bootstrap-mmgx4\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:53 crc kubenswrapper[4810]: I1203 05:59:53.324202 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-mmgx4" Dec 03 05:59:54 crc kubenswrapper[4810]: I1203 05:59:54.391596 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba900065-7cd5-4983-a9d3-2d708af18fb3" path="/var/lib/kubelet/pods/ba900065-7cd5-4983-a9d3-2d708af18fb3/volumes" Dec 03 05:59:54 crc kubenswrapper[4810]: I1203 05:59:54.438221 4810 generic.go:334] "Generic (PLEG): container finished" podID="dd194c59-21ca-4b1f-b269-a2844d332781" containerID="b7b0251c52336419515a4fd46c4ce2ffe9cda305a485333a3cd4e9bff263a99b" exitCode=0 Dec 03 05:59:54 crc kubenswrapper[4810]: I1203 05:59:54.438348 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-xwrlk" event={"ID":"dd194c59-21ca-4b1f-b269-a2844d332781","Type":"ContainerDied","Data":"b7b0251c52336419515a4fd46c4ce2ffe9cda305a485333a3cd4e9bff263a99b"} Dec 03 05:59:54 crc kubenswrapper[4810]: I1203 05:59:54.976912 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.081289 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-swift-storage-0\") pod \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.081392 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-svc\") pod \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.081420 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-config\") pod \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.081541 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5865z\" (UniqueName: \"kubernetes.io/projected/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-kube-api-access-5865z\") pod \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.081638 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-nb\") pod \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.081663 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-sb\") pod \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\" (UID: \"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9\") " Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.103117 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-kube-api-access-5865z" (OuterVolumeSpecName: "kube-api-access-5865z") pod "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" (UID: "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9"). InnerVolumeSpecName "kube-api-access-5865z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.132922 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" (UID: "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.132925 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" (UID: "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.147547 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" (UID: "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.155923 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-config" (OuterVolumeSpecName: "config") pod "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" (UID: "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.157355 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" (UID: "c4c9f70d-39bd-470d-8b34-97acb0fd2aa9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.183545 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.183583 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-config\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.183596 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5865z\" (UniqueName: \"kubernetes.io/projected/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-kube-api-access-5865z\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.183611 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.183620 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.183630 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.450460 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.452274 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d9fb694c-bn6dt" event={"ID":"c4c9f70d-39bd-470d-8b34-97acb0fd2aa9","Type":"ContainerDied","Data":"895436c300df4beb0f77d4abd7e7995ef737ce991957d94df6d34329aba66c87"} Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.453792 4810 scope.go:117] "RemoveContainer" containerID="5ceb661a2921b2c170abfdf567e43fa400463e4e929af28a6d3744e953cbf609" Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.494564 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d9fb694c-bn6dt"] Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.504105 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d9fb694c-bn6dt"] Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.677617 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 05:59:55 crc kubenswrapper[4810]: I1203 05:59:55.677679 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 05:59:56 crc kubenswrapper[4810]: I1203 05:59:56.390211 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" path="/var/lib/kubelet/pods/c4c9f70d-39bd-470d-8b34-97acb0fd2aa9/volumes" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.157757 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79"] Dec 03 06:00:00 crc kubenswrapper[4810]: E1203 06:00:00.158197 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerName="init" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.158215 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerName="init" Dec 03 06:00:00 crc kubenswrapper[4810]: E1203 06:00:00.158237 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerName="dnsmasq-dns" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.158243 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerName="dnsmasq-dns" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.158420 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4c9f70d-39bd-470d-8b34-97acb0fd2aa9" containerName="dnsmasq-dns" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.159087 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.162016 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.162473 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.173302 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79"] Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.278060 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wkxv\" (UniqueName: \"kubernetes.io/projected/6fd6f915-b856-4492-bcf7-11d93ac2f696-kube-api-access-6wkxv\") pod \"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.278224 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fd6f915-b856-4492-bcf7-11d93ac2f696-secret-volume\") pod \"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.278271 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fd6f915-b856-4492-bcf7-11d93ac2f696-config-volume\") pod \"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.380430 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fd6f915-b856-4492-bcf7-11d93ac2f696-secret-volume\") pod \"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.380504 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fd6f915-b856-4492-bcf7-11d93ac2f696-config-volume\") pod \"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.380577 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wkxv\" (UniqueName: \"kubernetes.io/projected/6fd6f915-b856-4492-bcf7-11d93ac2f696-kube-api-access-6wkxv\") pod \"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.382256 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fd6f915-b856-4492-bcf7-11d93ac2f696-config-volume\") pod 
\"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.399958 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fd6f915-b856-4492-bcf7-11d93ac2f696-secret-volume\") pod \"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.403265 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wkxv\" (UniqueName: \"kubernetes.io/projected/6fd6f915-b856-4492-bcf7-11d93ac2f696-kube-api-access-6wkxv\") pod \"collect-profiles-29412360-vnf79\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:00 crc kubenswrapper[4810]: I1203 06:00:00.488436 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.227850 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-xwrlk" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.270702 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-db-sync-config-data\") pod \"dd194c59-21ca-4b1f-b269-a2844d332781\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.270972 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-config-data\") pod \"dd194c59-21ca-4b1f-b269-a2844d332781\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.271004 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2pz8\" (UniqueName: \"kubernetes.io/projected/dd194c59-21ca-4b1f-b269-a2844d332781-kube-api-access-k2pz8\") pod \"dd194c59-21ca-4b1f-b269-a2844d332781\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.271034 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-combined-ca-bundle\") pod \"dd194c59-21ca-4b1f-b269-a2844d332781\" (UID: \"dd194c59-21ca-4b1f-b269-a2844d332781\") " Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.284155 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd194c59-21ca-4b1f-b269-a2844d332781-kube-api-access-k2pz8" (OuterVolumeSpecName: "kube-api-access-k2pz8") pod "dd194c59-21ca-4b1f-b269-a2844d332781" (UID: "dd194c59-21ca-4b1f-b269-a2844d332781"). InnerVolumeSpecName "kube-api-access-k2pz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.284403 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "dd194c59-21ca-4b1f-b269-a2844d332781" (UID: "dd194c59-21ca-4b1f-b269-a2844d332781"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.313161 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd194c59-21ca-4b1f-b269-a2844d332781" (UID: "dd194c59-21ca-4b1f-b269-a2844d332781"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.331853 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-config-data" (OuterVolumeSpecName: "config-data") pod "dd194c59-21ca-4b1f-b269-a2844d332781" (UID: "dd194c59-21ca-4b1f-b269-a2844d332781"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.374152 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.374500 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2pz8\" (UniqueName: \"kubernetes.io/projected/dd194c59-21ca-4b1f-b269-a2844d332781-kube-api-access-k2pz8\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.374575 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.374637 4810 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dd194c59-21ca-4b1f-b269-a2844d332781-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.550867 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-xwrlk" event={"ID":"dd194c59-21ca-4b1f-b269-a2844d332781","Type":"ContainerDied","Data":"d0807cb9eb2692610d3cd23fafcea6ac5c684dc807f6689f32302edf8dbe9784"} Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.550921 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0807cb9eb2692610d3cd23fafcea6ac5c684dc807f6689f32302edf8dbe9784" Dec 03 06:00:05 crc kubenswrapper[4810]: I1203 06:00:05.551022 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-xwrlk" Dec 03 06:00:06 crc kubenswrapper[4810]: I1203 06:00:06.422991 4810 scope.go:117] "RemoveContainer" containerID="71f3b7d8640c16b4770ef34879152df98c7f6f063a9b44667b8c103203ceba59" Dec 03 06:00:06 crc kubenswrapper[4810]: E1203 06:00:06.472936 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-cinder-api:2e38c527ddf6e767040136ecf014e7b9" Dec 03 06:00:06 crc kubenswrapper[4810]: E1203 06:00:06.473373 4810 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-cinder-api:2e38c527ddf6e767040136ecf014e7b9" Dec 03 06:00:06 crc kubenswrapper[4810]: E1203 06:00:06.473542 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.rdoproject.org/podified-master-centos9/openstack-cinder-api:2e38c527ddf6e767040136ecf014e7b9,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s575h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-z4zwh_openstack(36ce16c9-937b-4bdb-b5b1-09003d013c3a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" 
Dec 03 06:00:06 crc kubenswrapper[4810]: E1203 06:00:06.477820 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-z4zwh" podUID="36ce16c9-937b-4bdb-b5b1-09003d013c3a" Dec 03 06:00:06 crc kubenswrapper[4810]: E1203 06:00:06.719673 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-cinder-api:2e38c527ddf6e767040136ecf014e7b9\\\"\"" pod="openstack/cinder-db-sync-z4zwh" podUID="36ce16c9-937b-4bdb-b5b1-09003d013c3a" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:06.939760 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5584c78d67-wkldn"] Dec 03 06:00:07 crc kubenswrapper[4810]: E1203 06:00:06.940316 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd194c59-21ca-4b1f-b269-a2844d332781" containerName="glance-db-sync" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:06.940331 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd194c59-21ca-4b1f-b269-a2844d332781" containerName="glance-db-sync" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:06.940561 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd194c59-21ca-4b1f-b269-a2844d332781" containerName="glance-db-sync" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:06.941681 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:06.948602 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5584c78d67-wkldn"] Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.130475 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-svc\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.130871 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-nb\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.130901 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-sb\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.130947 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bmc9\" (UniqueName: \"kubernetes.io/projected/116ca285-75a8-411e-8be6-9f1f880c0576-kube-api-access-9bmc9\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 
06:00:07.130973 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-config\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.131030 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-swift-storage-0\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.175394 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-mmgx4"] Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.233902 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-svc\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.233956 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-nb\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.233995 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-sb\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.234043 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bmc9\" (UniqueName: \"kubernetes.io/projected/116ca285-75a8-411e-8be6-9f1f880c0576-kube-api-access-9bmc9\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.234068 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-config\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.234119 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-swift-storage-0\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.235066 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-swift-storage-0\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: 
\"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.235513 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-sb\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.235964 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-config\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.236151 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-svc\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.243706 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-nb\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.256424 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bmc9\" (UniqueName: \"kubernetes.io/projected/116ca285-75a8-411e-8be6-9f1f880c0576-kube-api-access-9bmc9\") pod \"dnsmasq-dns-5584c78d67-wkldn\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.358369 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79"] Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.362316 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.669683 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerStarted","Data":"513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05"} Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.672651 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mmgx4" event={"ID":"4f374320-accd-46d6-a286-3f2bac0a4180","Type":"ContainerStarted","Data":"9d215bf807965bd20646a7af7e21f9929cc210cc14d8e5cc34002f2d8a0485ee"} Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.672697 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mmgx4" event={"ID":"4f374320-accd-46d6-a286-3f2bac0a4180","Type":"ContainerStarted","Data":"420ac0101f4453afb5ba838daace34d1ead5f9677dceb8c99c6752400eca2534"} Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.685790 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-nqvx4" event={"ID":"33fa5884-f2c0-4391-a719-81c4d43605dc","Type":"ContainerStarted","Data":"e0cbbe960b81988c6280994b51906994971db03494dd0a0a5692e7eb2e4b780c"} Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.693877 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" event={"ID":"6fd6f915-b856-4492-bcf7-11d93ac2f696","Type":"ContainerStarted","Data":"a00edc9759d194d600b0e5e7ba58938a746d8dc043a07d604ba35b6ed449a7ed"} Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.703902 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-mmgx4" podStartSLOduration=15.703879452 podStartE2EDuration="15.703879452s" podCreationTimestamp="2025-12-03 05:59:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:07.69385118 +0000 UTC m=+1131.629312021" watchObservedRunningTime="2025-12-03 06:00:07.703879452 +0000 UTC m=+1131.639340293" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.704294 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hvg5q" event={"ID":"d2b44955-f7f1-4819-b948-e82272f18a2b","Type":"ContainerStarted","Data":"d0483bcf594721134f7ddfd975526d1b9874fe8a364e5c08c8755bac096eb292"} Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.720926 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-nqvx4" podStartSLOduration=6.7923956 podStartE2EDuration="29.720904898s" podCreationTimestamp="2025-12-03 05:59:38 +0000 UTC" firstStartedPulling="2025-12-03 05:59:43.499502981 +0000 UTC m=+1107.434963822" lastFinishedPulling="2025-12-03 06:00:06.428012269 +0000 UTC m=+1130.363473120" observedRunningTime="2025-12-03 06:00:07.710619738 +0000 UTC m=+1131.646080579" watchObservedRunningTime="2025-12-03 06:00:07.720904898 +0000 UTC m=+1131.656365739" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.740450 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-hvg5q" podStartSLOduration=8.157589011 podStartE2EDuration="29.740424808s" podCreationTimestamp="2025-12-03 05:59:38 +0000 UTC" firstStartedPulling="2025-12-03 05:59:43.523020827 +0000 UTC m=+1107.458481668" lastFinishedPulling="2025-12-03 
06:00:05.105856614 +0000 UTC m=+1129.041317465" observedRunningTime="2025-12-03 06:00:07.733184479 +0000 UTC m=+1131.668645320" watchObservedRunningTime="2025-12-03 06:00:07.740424808 +0000 UTC m=+1131.675885639" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.754117 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" podStartSLOduration=7.754094246 podStartE2EDuration="7.754094246s" podCreationTimestamp="2025-12-03 06:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:07.753137961 +0000 UTC m=+1131.688598802" watchObservedRunningTime="2025-12-03 06:00:07.754094246 +0000 UTC m=+1131.689555087" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.842915 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.844465 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.852229 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-qfwpn" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.852390 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.853213 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.863424 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.949169 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-config-data\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.949232 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.949251 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.949282 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfj4s\" (UniqueName: \"kubernetes.io/projected/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-kube-api-access-tfj4s\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.949314 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-scripts\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.949634 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:07 crc kubenswrapper[4810]: I1203 06:00:07.949785 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-logs\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.052701 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.053113 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.053072 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.053378 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfj4s\" (UniqueName: \"kubernetes.io/projected/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-kube-api-access-tfj4s\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.053510 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-scripts\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.053587 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.053712 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.053893 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-logs\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.054241 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-config-data\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.055143 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-logs\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.071437 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-config-data\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.072072 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.075179 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-scripts\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.075846 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfj4s\" (UniqueName: \"kubernetes.io/projected/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-kube-api-access-tfj4s\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.257890 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.258843 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5584c78d67-wkldn"] Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.276963 4810 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.288037 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.288170 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.290835 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.367527 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.367862 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.368836 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.368971 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrhb5\" (UniqueName: \"kubernetes.io/projected/031a2a5d-bba9-44a7-98b7-fe593eb22924-kube-api-access-zrhb5\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.370398 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-logs\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.370602 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-scripts\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.370702 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-config-data\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.471530 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-scripts\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.471584 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-config-data\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.471619 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.471641 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.471676 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.471714 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrhb5\" (UniqueName: \"kubernetes.io/projected/031a2a5d-bba9-44a7-98b7-fe593eb22924-kube-api-access-zrhb5\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.471759 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-logs\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.472173 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-logs\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.472716 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.472767 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"031a2a5d-bba9-44a7-98b7-fe593eb22924\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.479494 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.480410 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-config-data\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.482621 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-scripts\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.490914 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrhb5\" (UniqueName: \"kubernetes.io/projected/031a2a5d-bba9-44a7-98b7-fe593eb22924-kube-api-access-zrhb5\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.499664 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.518607 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.645210 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.730589 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" event={"ID":"116ca285-75a8-411e-8be6-9f1f880c0576","Type":"ContainerStarted","Data":"5e4bec0350b16e855ede9d60ebb00cadc12123e7f4fb32000e9b81d4a18a02c3"} Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.734099 4810 generic.go:334] "Generic (PLEG): container finished" podID="6fd6f915-b856-4492-bcf7-11d93ac2f696" containerID="794895541d9a21e66ee930bf3177b97d32372698615d953311fdceb8f1df6819" exitCode=0 Dec 03 06:00:08 crc kubenswrapper[4810]: I1203 06:00:08.735511 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" event={"ID":"6fd6f915-b856-4492-bcf7-11d93ac2f696","Type":"ContainerDied","Data":"794895541d9a21e66ee930bf3177b97d32372698615d953311fdceb8f1df6819"} Dec 03 06:00:09 crc kubenswrapper[4810]: I1203 06:00:09.749448 4810 generic.go:334] "Generic (PLEG): container finished" podID="83ad186f-b9b2-43c4-8b88-8c6df56cd132" containerID="92ee5f26cb111ef92be899386e950ff0fb2f077896ee97e05a274d74bd87401e" exitCode=0 Dec 03 06:00:09 crc kubenswrapper[4810]: I1203 06:00:09.749572 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-sg2m8" event={"ID":"83ad186f-b9b2-43c4-8b88-8c6df56cd132","Type":"ContainerDied","Data":"92ee5f26cb111ef92be899386e950ff0fb2f077896ee97e05a274d74bd87401e"} Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.241530 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.290010 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.333718 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wkxv\" (UniqueName: \"kubernetes.io/projected/6fd6f915-b856-4492-bcf7-11d93ac2f696-kube-api-access-6wkxv\") pod \"6fd6f915-b856-4492-bcf7-11d93ac2f696\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.333791 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fd6f915-b856-4492-bcf7-11d93ac2f696-config-volume\") pod \"6fd6f915-b856-4492-bcf7-11d93ac2f696\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.333869 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fd6f915-b856-4492-bcf7-11d93ac2f696-secret-volume\") pod \"6fd6f915-b856-4492-bcf7-11d93ac2f696\" (UID: \"6fd6f915-b856-4492-bcf7-11d93ac2f696\") " Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.335924 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fd6f915-b856-4492-bcf7-11d93ac2f696-config-volume" (OuterVolumeSpecName: "config-volume") pod "6fd6f915-b856-4492-bcf7-11d93ac2f696" (UID: "6fd6f915-b856-4492-bcf7-11d93ac2f696"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.348045 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd6f915-b856-4492-bcf7-11d93ac2f696-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6fd6f915-b856-4492-bcf7-11d93ac2f696" (UID: "6fd6f915-b856-4492-bcf7-11d93ac2f696"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.348096 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fd6f915-b856-4492-bcf7-11d93ac2f696-kube-api-access-6wkxv" (OuterVolumeSpecName: "kube-api-access-6wkxv") pod "6fd6f915-b856-4492-bcf7-11d93ac2f696" (UID: "6fd6f915-b856-4492-bcf7-11d93ac2f696"). InnerVolumeSpecName "kube-api-access-6wkxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.421651 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.435976 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wkxv\" (UniqueName: \"kubernetes.io/projected/6fd6f915-b856-4492-bcf7-11d93ac2f696-kube-api-access-6wkxv\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.436014 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fd6f915-b856-4492-bcf7-11d93ac2f696-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.436029 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fd6f915-b856-4492-bcf7-11d93ac2f696-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:10 crc kubenswrapper[4810]: W1203 06:00:10.524861 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod031a2a5d_bba9_44a7_98b7_fe593eb22924.slice/crio-56530b4a7ffb0e00ea273762ed355d1fb1029bc7efcb23cc2c107adb65f61cbb WatchSource:0}: Error finding container 56530b4a7ffb0e00ea273762ed355d1fb1029bc7efcb23cc2c107adb65f61cbb: Status 404 returned error can't find the container with id 56530b4a7ffb0e00ea273762ed355d1fb1029bc7efcb23cc2c107adb65f61cbb Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.525582 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.618222 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:10 crc kubenswrapper[4810]: W1203 06:00:10.628248 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c3f1d38_7222_45ba_aa1e_7a66b839e49d.slice/crio-62dee238987424500b2373c7709527d6c15d27c516987907ddf5be6c919bf096 WatchSource:0}: Error finding container 62dee238987424500b2373c7709527d6c15d27c516987907ddf5be6c919bf096: Status 404 returned error can't find the container with id 62dee238987424500b2373c7709527d6c15d27c516987907ddf5be6c919bf096 Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.795655 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" 
event={"ID":"6fd6f915-b856-4492-bcf7-11d93ac2f696","Type":"ContainerDied","Data":"a00edc9759d194d600b0e5e7ba58938a746d8dc043a07d604ba35b6ed449a7ed"} Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.795707 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a00edc9759d194d600b0e5e7ba58938a746d8dc043a07d604ba35b6ed449a7ed" Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.795799 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79" Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.800228 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6c3f1d38-7222-45ba-aa1e-7a66b839e49d","Type":"ContainerStarted","Data":"62dee238987424500b2373c7709527d6c15d27c516987907ddf5be6c919bf096"} Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.821908 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"031a2a5d-bba9-44a7-98b7-fe593eb22924","Type":"ContainerStarted","Data":"56530b4a7ffb0e00ea273762ed355d1fb1029bc7efcb23cc2c107adb65f61cbb"} Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.825112 4810 generic.go:334] "Generic (PLEG): container finished" podID="116ca285-75a8-411e-8be6-9f1f880c0576" containerID="b20fd1ad95bd91978406fbfd644d07a292e88577040e6216610eeb57260877a9" exitCode=0 Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.825392 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" event={"ID":"116ca285-75a8-411e-8be6-9f1f880c0576","Type":"ContainerDied","Data":"b20fd1ad95bd91978406fbfd644d07a292e88577040e6216610eeb57260877a9"} Dec 03 06:00:10 crc kubenswrapper[4810]: I1203 06:00:10.847488 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerStarted","Data":"2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194"} Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.322209 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-sg2m8" Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.361239 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npkmc\" (UniqueName: \"kubernetes.io/projected/83ad186f-b9b2-43c4-8b88-8c6df56cd132-kube-api-access-npkmc\") pod \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.361437 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-combined-ca-bundle\") pod \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.361603 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-config\") pod \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\" (UID: \"83ad186f-b9b2-43c4-8b88-8c6df56cd132\") " Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.391119 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83ad186f-b9b2-43c4-8b88-8c6df56cd132-kube-api-access-npkmc" (OuterVolumeSpecName: "kube-api-access-npkmc") pod "83ad186f-b9b2-43c4-8b88-8c6df56cd132" (UID: "83ad186f-b9b2-43c4-8b88-8c6df56cd132"). InnerVolumeSpecName "kube-api-access-npkmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.456163 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83ad186f-b9b2-43c4-8b88-8c6df56cd132" (UID: "83ad186f-b9b2-43c4-8b88-8c6df56cd132"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.463715 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npkmc\" (UniqueName: \"kubernetes.io/projected/83ad186f-b9b2-43c4-8b88-8c6df56cd132-kube-api-access-npkmc\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.463768 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.486924 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-config" (OuterVolumeSpecName: "config") pod "83ad186f-b9b2-43c4-8b88-8c6df56cd132" (UID: "83ad186f-b9b2-43c4-8b88-8c6df56cd132"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.568362 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/83ad186f-b9b2-43c4-8b88-8c6df56cd132-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.924769 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6c3f1d38-7222-45ba-aa1e-7a66b839e49d","Type":"ContainerStarted","Data":"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9"} Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.930122 4810 generic.go:334] "Generic (PLEG): container finished" podID="d2b44955-f7f1-4819-b948-e82272f18a2b" containerID="d0483bcf594721134f7ddfd975526d1b9874fe8a364e5c08c8755bac096eb292" exitCode=0 Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.930277 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hvg5q" event={"ID":"d2b44955-f7f1-4819-b948-e82272f18a2b","Type":"ContainerDied","Data":"d0483bcf594721134f7ddfd975526d1b9874fe8a364e5c08c8755bac096eb292"} Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.953437 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-sg2m8" event={"ID":"83ad186f-b9b2-43c4-8b88-8c6df56cd132","Type":"ContainerDied","Data":"3ca0901902baccacfcc4793b5e616055e5606676b9e649add0a1c11b8341bd81"} Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.953491 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ca0901902baccacfcc4793b5e616055e5606676b9e649add0a1c11b8341bd81" Dec 03 06:00:11 crc kubenswrapper[4810]: I1203 06:00:11.953568 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-sg2m8" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.007531 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"031a2a5d-bba9-44a7-98b7-fe593eb22924","Type":"ContainerStarted","Data":"49feebb18b5746ef49b2dc7f494e1fcd90caa53c28b4a3aef20b676bb4776a5d"} Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.019438 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" event={"ID":"116ca285-75a8-411e-8be6-9f1f880c0576","Type":"ContainerStarted","Data":"b6209bd3d68a47fec30896eee705a702eafa978022a233c48ab306c62dd775e8"} Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.021066 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.038764 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5584c78d67-wkldn"] Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.063943 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c9c8b6bc-hnh9h"] Dec 03 06:00:12 crc kubenswrapper[4810]: E1203 06:00:12.064607 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ad186f-b9b2-43c4-8b88-8c6df56cd132" containerName="neutron-db-sync" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.064635 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ad186f-b9b2-43c4-8b88-8c6df56cd132" containerName="neutron-db-sync" Dec 03 06:00:12 crc kubenswrapper[4810]: E1203 06:00:12.064679 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd6f915-b856-4492-bcf7-11d93ac2f696" containerName="collect-profiles" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.064688 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd6f915-b856-4492-bcf7-11d93ac2f696" containerName="collect-profiles" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.064913 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fd6f915-b856-4492-bcf7-11d93ac2f696" containerName="collect-profiles" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.064935 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ad186f-b9b2-43c4-8b88-8c6df56cd132" containerName="neutron-db-sync" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.066111 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c9c8b6bc-hnh9h"] Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.066231 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.100415 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" podStartSLOduration=6.100332878 podStartE2EDuration="6.100332878s" podCreationTimestamp="2025-12-03 06:00:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:12.088509329 +0000 UTC m=+1136.023970170" watchObservedRunningTime="2025-12-03 06:00:12.100332878 +0000 UTC m=+1136.035793719" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.140864 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-76d8d4696d-45zhd"] Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.143143 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.148074 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.148533 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-bkz8h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.148817 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.149012 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.163932 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76d8d4696d-45zhd"] Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.200361 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-swift-storage-0\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.200431 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2xdh\" (UniqueName: \"kubernetes.io/projected/6a6861b3-17c5-4195-bfc4-f51c356d8698-kube-api-access-r2xdh\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.200464 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-nb\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.200508 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-svc\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.200561 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-config\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.201006 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-sb\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302283 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-ovndb-tls-certs\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302353 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-swift-storage-0\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302385 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-combined-ca-bundle\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302413 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2xdh\" (UniqueName: \"kubernetes.io/projected/6a6861b3-17c5-4195-bfc4-f51c356d8698-kube-api-access-r2xdh\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302441 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-config\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302464 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-nb\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302485 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-httpd-config\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302511 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-svc\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302532 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jwtw\" (UniqueName: \"kubernetes.io/projected/c65e15df-594b-4292-b784-a8586fbec721-kube-api-access-6jwtw\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.302569 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-config\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.303568 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-svc\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.303622 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-sb\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.303625 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-config\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.303754 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-nb\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.303908 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-sb\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.304370 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-swift-storage-0\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.323637 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2xdh\" (UniqueName: 
\"kubernetes.io/projected/6a6861b3-17c5-4195-bfc4-f51c356d8698-kube-api-access-r2xdh\") pod \"dnsmasq-dns-57c9c8b6bc-hnh9h\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.405284 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-ovndb-tls-certs\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.405798 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-combined-ca-bundle\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.405839 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-config\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.405866 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-httpd-config\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.405898 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jwtw\" (UniqueName: \"kubernetes.io/projected/c65e15df-594b-4292-b784-a8586fbec721-kube-api-access-6jwtw\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.412398 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-combined-ca-bundle\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.413251 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-httpd-config\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.414831 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-config\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.415536 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-ovndb-tls-certs\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " 
pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.422902 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.432145 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jwtw\" (UniqueName: \"kubernetes.io/projected/c65e15df-594b-4292-b784-a8586fbec721-kube-api-access-6jwtw\") pod \"neutron-76d8d4696d-45zhd\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.479651 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:12 crc kubenswrapper[4810]: I1203 06:00:12.915701 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c9c8b6bc-hnh9h"] Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.084881 4810 generic.go:334] "Generic (PLEG): container finished" podID="33fa5884-f2c0-4391-a719-81c4d43605dc" containerID="e0cbbe960b81988c6280994b51906994971db03494dd0a0a5692e7eb2e4b780c" exitCode=0 Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.085030 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-nqvx4" event={"ID":"33fa5884-f2c0-4391-a719-81c4d43605dc","Type":"ContainerDied","Data":"e0cbbe960b81988c6280994b51906994971db03494dd0a0a5692e7eb2e4b780c"} Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.087382 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6c3f1d38-7222-45ba-aa1e-7a66b839e49d","Type":"ContainerStarted","Data":"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a"} Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.087513 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerName="glance-log" containerID="cri-o://6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9" gracePeriod=30 Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.087645 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerName="glance-httpd" containerID="cri-o://cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a" gracePeriod=30 Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.092049 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" event={"ID":"6a6861b3-17c5-4195-bfc4-f51c356d8698","Type":"ContainerStarted","Data":"c72f6de2be0b8a1c282ed96972dd8d1df2e5c0003205c88e028f76d0824437af"} Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.097238 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"031a2a5d-bba9-44a7-98b7-fe593eb22924","Type":"ContainerStarted","Data":"250b2caa20e3f5a42488285a0297f855ccc0e6eecdcbe45a7119b4694e4fddd9"} Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.097677 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerName="glance-log" containerID="cri-o://49feebb18b5746ef49b2dc7f494e1fcd90caa53c28b4a3aef20b676bb4776a5d" gracePeriod=30 Dec 03 
06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.097893 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerName="glance-httpd" containerID="cri-o://250b2caa20e3f5a42488285a0297f855ccc0e6eecdcbe45a7119b4694e4fddd9" gracePeriod=30 Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.104516 4810 generic.go:334] "Generic (PLEG): container finished" podID="4f374320-accd-46d6-a286-3f2bac0a4180" containerID="9d215bf807965bd20646a7af7e21f9929cc210cc14d8e5cc34002f2d8a0485ee" exitCode=0 Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.104831 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mmgx4" event={"ID":"4f374320-accd-46d6-a286-3f2bac0a4180","Type":"ContainerDied","Data":"9d215bf807965bd20646a7af7e21f9929cc210cc14d8e5cc34002f2d8a0485ee"} Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.144871 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.144843649 podStartE2EDuration="7.144843649s" podCreationTimestamp="2025-12-03 06:00:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:13.126064147 +0000 UTC m=+1137.061524988" watchObservedRunningTime="2025-12-03 06:00:13.144843649 +0000 UTC m=+1137.080304490" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.167303 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.167281446 podStartE2EDuration="6.167281446s" podCreationTimestamp="2025-12-03 06:00:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:13.165939241 +0000 UTC m=+1137.101400092" watchObservedRunningTime="2025-12-03 06:00:13.167281446 +0000 UTC m=+1137.102742287" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.339650 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76d8d4696d-45zhd"] Dec 03 06:00:13 crc kubenswrapper[4810]: E1203 06:00:13.505699 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod031a2a5d_bba9_44a7_98b7_fe593eb22924.slice/crio-250b2caa20e3f5a42488285a0297f855ccc0e6eecdcbe45a7119b4694e4fddd9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod031a2a5d_bba9_44a7_98b7_fe593eb22924.slice/crio-conmon-49feebb18b5746ef49b2dc7f494e1fcd90caa53c28b4a3aef20b676bb4776a5d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod031a2a5d_bba9_44a7_98b7_fe593eb22924.slice/crio-conmon-250b2caa20e3f5a42488285a0297f855ccc0e6eecdcbe45a7119b4694e4fddd9.scope\": RecentStats: unable to find data in memory cache]" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.507469 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-hvg5q" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.632188 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-config-data\") pod \"d2b44955-f7f1-4819-b948-e82272f18a2b\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.632791 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2b44955-f7f1-4819-b948-e82272f18a2b-logs\") pod \"d2b44955-f7f1-4819-b948-e82272f18a2b\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.632851 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-scripts\") pod \"d2b44955-f7f1-4819-b948-e82272f18a2b\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.632936 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltw98\" (UniqueName: \"kubernetes.io/projected/d2b44955-f7f1-4819-b948-e82272f18a2b-kube-api-access-ltw98\") pod \"d2b44955-f7f1-4819-b948-e82272f18a2b\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.632968 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-combined-ca-bundle\") pod \"d2b44955-f7f1-4819-b948-e82272f18a2b\" (UID: \"d2b44955-f7f1-4819-b948-e82272f18a2b\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.633798 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2b44955-f7f1-4819-b948-e82272f18a2b-logs" (OuterVolumeSpecName: "logs") pod "d2b44955-f7f1-4819-b948-e82272f18a2b" (UID: "d2b44955-f7f1-4819-b948-e82272f18a2b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.638871 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2b44955-f7f1-4819-b948-e82272f18a2b-kube-api-access-ltw98" (OuterVolumeSpecName: "kube-api-access-ltw98") pod "d2b44955-f7f1-4819-b948-e82272f18a2b" (UID: "d2b44955-f7f1-4819-b948-e82272f18a2b"). InnerVolumeSpecName "kube-api-access-ltw98". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.640088 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-scripts" (OuterVolumeSpecName: "scripts") pod "d2b44955-f7f1-4819-b948-e82272f18a2b" (UID: "d2b44955-f7f1-4819-b948-e82272f18a2b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.681030 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2b44955-f7f1-4819-b948-e82272f18a2b" (UID: "d2b44955-f7f1-4819-b948-e82272f18a2b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.681688 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-config-data" (OuterVolumeSpecName: "config-data") pod "d2b44955-f7f1-4819-b948-e82272f18a2b" (UID: "d2b44955-f7f1-4819-b948-e82272f18a2b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.735300 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.735355 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2b44955-f7f1-4819-b948-e82272f18a2b-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.735364 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.735378 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltw98\" (UniqueName: \"kubernetes.io/projected/d2b44955-f7f1-4819-b948-e82272f18a2b-kube-api-access-ltw98\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.735389 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b44955-f7f1-4819-b948-e82272f18a2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.859997 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.938322 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-logs\") pod \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.938468 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfj4s\" (UniqueName: \"kubernetes.io/projected/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-kube-api-access-tfj4s\") pod \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.938496 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-httpd-run\") pod \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.938580 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.938601 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-scripts\") pod \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.938652 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-config-data\") pod \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.938762 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-combined-ca-bundle\") pod \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\" (UID: \"6c3f1d38-7222-45ba-aa1e-7a66b839e49d\") " Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.941327 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-logs" (OuterVolumeSpecName: "logs") pod "6c3f1d38-7222-45ba-aa1e-7a66b839e49d" (UID: "6c3f1d38-7222-45ba-aa1e-7a66b839e49d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.942035 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6c3f1d38-7222-45ba-aa1e-7a66b839e49d" (UID: "6c3f1d38-7222-45ba-aa1e-7a66b839e49d"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.947120 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "6c3f1d38-7222-45ba-aa1e-7a66b839e49d" (UID: "6c3f1d38-7222-45ba-aa1e-7a66b839e49d"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.949514 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-kube-api-access-tfj4s" (OuterVolumeSpecName: "kube-api-access-tfj4s") pod "6c3f1d38-7222-45ba-aa1e-7a66b839e49d" (UID: "6c3f1d38-7222-45ba-aa1e-7a66b839e49d"). InnerVolumeSpecName "kube-api-access-tfj4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:13 crc kubenswrapper[4810]: I1203 06:00:13.950357 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-scripts" (OuterVolumeSpecName: "scripts") pod "6c3f1d38-7222-45ba-aa1e-7a66b839e49d" (UID: "6c3f1d38-7222-45ba-aa1e-7a66b839e49d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.027811 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-config-data" (OuterVolumeSpecName: "config-data") pod "6c3f1d38-7222-45ba-aa1e-7a66b839e49d" (UID: "6c3f1d38-7222-45ba-aa1e-7a66b839e49d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.042643 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.042686 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfj4s\" (UniqueName: \"kubernetes.io/projected/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-kube-api-access-tfj4s\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.042698 4810 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.042693 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c3f1d38-7222-45ba-aa1e-7a66b839e49d" (UID: "6c3f1d38-7222-45ba-aa1e-7a66b839e49d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.042750 4810 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.042812 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.042876 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.069722 4810 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.129180 4810 generic.go:334] "Generic (PLEG): container finished" podID="6a6861b3-17c5-4195-bfc4-f51c356d8698" containerID="1c90f16c349e6679dfefbb87139b5e695c20fabf7c95029041afcf0796a8ce39" exitCode=0 Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.130174 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" event={"ID":"6a6861b3-17c5-4195-bfc4-f51c356d8698","Type":"ContainerDied","Data":"1c90f16c349e6679dfefbb87139b5e695c20fabf7c95029041afcf0796a8ce39"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.138200 4810 generic.go:334] "Generic (PLEG): container finished" podID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerID="250b2caa20e3f5a42488285a0297f855ccc0e6eecdcbe45a7119b4694e4fddd9" exitCode=0 Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.138248 4810 generic.go:334] "Generic (PLEG): container finished" podID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerID="49feebb18b5746ef49b2dc7f494e1fcd90caa53c28b4a3aef20b676bb4776a5d" exitCode=143 Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.138340 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"031a2a5d-bba9-44a7-98b7-fe593eb22924","Type":"ContainerDied","Data":"250b2caa20e3f5a42488285a0297f855ccc0e6eecdcbe45a7119b4694e4fddd9"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.138379 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"031a2a5d-bba9-44a7-98b7-fe593eb22924","Type":"ContainerDied","Data":"49feebb18b5746ef49b2dc7f494e1fcd90caa53c28b4a3aef20b676bb4776a5d"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.142655 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-67f9f779cb-gh9cv"] Dec 03 06:00:14 crc kubenswrapper[4810]: E1203 06:00:14.144453 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerName="glance-log" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.144481 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerName="glance-log" Dec 03 06:00:14 crc kubenswrapper[4810]: E1203 06:00:14.144507 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2b44955-f7f1-4819-b948-e82272f18a2b" containerName="placement-db-sync" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 
06:00:14.144513 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2b44955-f7f1-4819-b948-e82272f18a2b" containerName="placement-db-sync" Dec 03 06:00:14 crc kubenswrapper[4810]: E1203 06:00:14.144525 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerName="glance-httpd" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.144531 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerName="glance-httpd" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.144751 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerName="glance-log" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.144774 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerName="glance-httpd" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.144785 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2b44955-f7f1-4819-b948-e82272f18a2b" containerName="placement-db-sync" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.145768 4810 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.145805 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c3f1d38-7222-45ba-aa1e-7a66b839e49d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.145810 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.151145 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.155905 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.175296 4810 generic.go:334] "Generic (PLEG): container finished" podID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerID="cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a" exitCode=143 Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.175341 4810 generic.go:334] "Generic (PLEG): container finished" podID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" containerID="6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9" exitCode=143 Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.175431 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6c3f1d38-7222-45ba-aa1e-7a66b839e49d","Type":"ContainerDied","Data":"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.175467 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6c3f1d38-7222-45ba-aa1e-7a66b839e49d","Type":"ContainerDied","Data":"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.175479 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"6c3f1d38-7222-45ba-aa1e-7a66b839e49d","Type":"ContainerDied","Data":"62dee238987424500b2373c7709527d6c15d27c516987907ddf5be6c919bf096"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.175496 4810 scope.go:117] "RemoveContainer" containerID="cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.175664 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.185761 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-67f9f779cb-gh9cv"] Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.210204 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hvg5q" event={"ID":"d2b44955-f7f1-4819-b948-e82272f18a2b","Type":"ContainerDied","Data":"752973820a7f07eb45853d58adb45447447886abf96e82425dff3169c94a9120"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.210248 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="752973820a7f07eb45853d58adb45447447886abf96e82425dff3169c94a9120" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.210414 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hvg5q" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.223446 4810 scope.go:117] "RemoveContainer" containerID="6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.242293 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" podUID="116ca285-75a8-411e-8be6-9f1f880c0576" containerName="dnsmasq-dns" containerID="cri-o://b6209bd3d68a47fec30896eee705a702eafa978022a233c48ab306c62dd775e8" gracePeriod=10 Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.243038 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d8d4696d-45zhd" event={"ID":"c65e15df-594b-4292-b784-a8586fbec721","Type":"ContainerStarted","Data":"1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.243109 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.243128 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d8d4696d-45zhd" event={"ID":"c65e15df-594b-4292-b784-a8586fbec721","Type":"ContainerStarted","Data":"fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.243139 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d8d4696d-45zhd" event={"ID":"c65e15df-594b-4292-b784-a8586fbec721","Type":"ContainerStarted","Data":"db5b7c863114ff70f0097f7492ef7bd97dd9dfd9122646bb07b4e27234a4a6d4"} Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.247336 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-internal-tls-certs\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.247478 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggc8n\" (UniqueName: \"kubernetes.io/projected/770e7dec-064e-4641-a94b-78121261d7cd-kube-api-access-ggc8n\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.247517 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-combined-ca-bundle\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.247543 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-scripts\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.247590 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/770e7dec-064e-4641-a94b-78121261d7cd-logs\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.247648 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-config-data\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.247685 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-public-tls-certs\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.297757 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.305368 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.323525 4810 scope.go:117] "RemoveContainer" containerID="cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a" Dec 03 06:00:14 crc kubenswrapper[4810]: E1203 06:00:14.324527 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a\": container with ID starting with cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a not found: ID does not exist" containerID="cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.324610 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a"} err="failed to get container status 
\"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a\": rpc error: code = NotFound desc = could not find container \"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a\": container with ID starting with cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a not found: ID does not exist" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.324673 4810 scope.go:117] "RemoveContainer" containerID="6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.329004 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-76d8d4696d-45zhd" podStartSLOduration=2.328988592 podStartE2EDuration="2.328988592s" podCreationTimestamp="2025-12-03 06:00:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:14.283502822 +0000 UTC m=+1138.218963663" watchObservedRunningTime="2025-12-03 06:00:14.328988592 +0000 UTC m=+1138.264449433" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.330774 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.332575 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: E1203 06:00:14.333642 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9\": container with ID starting with 6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9 not found: ID does not exist" containerID="6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.333705 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9"} err="failed to get container status \"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9\": rpc error: code = NotFound desc = could not find container \"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9\": container with ID starting with 6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9 not found: ID does not exist" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.333765 4810 scope.go:117] "RemoveContainer" containerID="cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.334099 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a"} err="failed to get container status \"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a\": rpc error: code = NotFound desc = could not find container \"cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a\": container with ID starting with cc16d085b4e25170cc102c0da9d5880132d44b13193c4be6278798fe47676d4a not found: ID does not exist" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.334120 4810 scope.go:117] "RemoveContainer" containerID="6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.334287 4810 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9"} err="failed to get container status \"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9\": rpc error: code = NotFound desc = could not find container \"6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9\": container with ID starting with 6479c0c964e8e8dd0e7c588ac871341d7367f697ac08f2e5447b0a2e5a729be9 not found: ID does not exist" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.338375 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.338872 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.345937 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.349351 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggc8n\" (UniqueName: \"kubernetes.io/projected/770e7dec-064e-4641-a94b-78121261d7cd-kube-api-access-ggc8n\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.349424 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-combined-ca-bundle\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.349454 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-scripts\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.349498 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/770e7dec-064e-4641-a94b-78121261d7cd-logs\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.349556 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-config-data\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.349582 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-public-tls-certs\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.349627 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-internal-tls-certs\") pod 
\"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.350835 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/770e7dec-064e-4641-a94b-78121261d7cd-logs\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.368741 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-combined-ca-bundle\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.369949 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-config-data\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.372028 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-scripts\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.372468 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-internal-tls-certs\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.376556 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/770e7dec-064e-4641-a94b-78121261d7cd-public-tls-certs\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.385428 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggc8n\" (UniqueName: \"kubernetes.io/projected/770e7dec-064e-4641-a94b-78121261d7cd-kube-api-access-ggc8n\") pod \"placement-67f9f779cb-gh9cv\" (UID: \"770e7dec-064e-4641-a94b-78121261d7cd\") " pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.433822 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c3f1d38-7222-45ba-aa1e-7a66b839e49d" path="/var/lib/kubelet/pods/6c3f1d38-7222-45ba-aa1e-7a66b839e49d/volumes" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.451857 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-logs\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.451931 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-scripts\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.451999 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.452082 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-config-data\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.452104 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.452139 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swhz9\" (UniqueName: \"kubernetes.io/projected/bf98f609-ab0e-41f9-8f8a-72324c3ac333-kube-api-access-swhz9\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.452191 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.452241 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.473678 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.553207 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-config-data\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.553253 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.553288 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swhz9\" (UniqueName: \"kubernetes.io/projected/bf98f609-ab0e-41f9-8f8a-72324c3ac333-kube-api-access-swhz9\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.553309 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.553331 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.553372 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-logs\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.553399 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-scripts\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.553432 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.570207 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 03 
06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.603844 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-logs\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.623211 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.625540 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swhz9\" (UniqueName: \"kubernetes.io/projected/bf98f609-ab0e-41f9-8f8a-72324c3ac333-kube-api-access-swhz9\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.630369 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.633576 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.639900 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-config-data\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.640369 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-scripts\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.701958 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.913284 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-74c9fd966f-8fc7k"] Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.915628 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.916529 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.923929 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.924753 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.941590 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-74c9fd966f-8fc7k"] Dec 03 06:00:14 crc kubenswrapper[4810]: I1203 06:00:14.949880 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.046344 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-mmgx4" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.084385 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-combined-ca-bundle\") pod \"031a2a5d-bba9-44a7-98b7-fe593eb22924\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.084448 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-config-data\") pod \"031a2a5d-bba9-44a7-98b7-fe593eb22924\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.084511 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-logs\") pod \"031a2a5d-bba9-44a7-98b7-fe593eb22924\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.084559 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-scripts\") pod \"031a2a5d-bba9-44a7-98b7-fe593eb22924\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.084631 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-httpd-run\") pod \"031a2a5d-bba9-44a7-98b7-fe593eb22924\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.084747 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"031a2a5d-bba9-44a7-98b7-fe593eb22924\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.084768 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrhb5\" (UniqueName: \"kubernetes.io/projected/031a2a5d-bba9-44a7-98b7-fe593eb22924-kube-api-access-zrhb5\") pod \"031a2a5d-bba9-44a7-98b7-fe593eb22924\" (UID: \"031a2a5d-bba9-44a7-98b7-fe593eb22924\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.085143 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-config\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.085202 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-combined-ca-bundle\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.085322 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn9qj\" (UniqueName: \"kubernetes.io/projected/411b48fc-bafd-47a2-8bd6-c31e2132b09f-kube-api-access-jn9qj\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.085355 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-public-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.085408 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-httpd-config\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.085436 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-ovndb-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.085455 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-internal-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.088077 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "031a2a5d-bba9-44a7-98b7-fe593eb22924" (UID: "031a2a5d-bba9-44a7-98b7-fe593eb22924"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.089957 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-logs" (OuterVolumeSpecName: "logs") pod "031a2a5d-bba9-44a7-98b7-fe593eb22924" (UID: "031a2a5d-bba9-44a7-98b7-fe593eb22924"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.092187 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "031a2a5d-bba9-44a7-98b7-fe593eb22924" (UID: "031a2a5d-bba9-44a7-98b7-fe593eb22924"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.094322 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-scripts" (OuterVolumeSpecName: "scripts") pod "031a2a5d-bba9-44a7-98b7-fe593eb22924" (UID: "031a2a5d-bba9-44a7-98b7-fe593eb22924"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.096852 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/031a2a5d-bba9-44a7-98b7-fe593eb22924-kube-api-access-zrhb5" (OuterVolumeSpecName: "kube-api-access-zrhb5") pod "031a2a5d-bba9-44a7-98b7-fe593eb22924" (UID: "031a2a5d-bba9-44a7-98b7-fe593eb22924"). InnerVolumeSpecName "kube-api-access-zrhb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.135155 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "031a2a5d-bba9-44a7-98b7-fe593eb22924" (UID: "031a2a5d-bba9-44a7-98b7-fe593eb22924"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.145847 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-config-data" (OuterVolumeSpecName: "config-data") pod "031a2a5d-bba9-44a7-98b7-fe593eb22924" (UID: "031a2a5d-bba9-44a7-98b7-fe593eb22924"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.187325 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-credential-keys\") pod \"4f374320-accd-46d6-a286-3f2bac0a4180\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.188078 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-config-data\") pod \"4f374320-accd-46d6-a286-3f2bac0a4180\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.188181 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-combined-ca-bundle\") pod \"4f374320-accd-46d6-a286-3f2bac0a4180\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.188234 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-fernet-keys\") pod \"4f374320-accd-46d6-a286-3f2bac0a4180\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.188269 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-scripts\") pod \"4f374320-accd-46d6-a286-3f2bac0a4180\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.189673 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jfcm\" (UniqueName: \"kubernetes.io/projected/4f374320-accd-46d6-a286-3f2bac0a4180-kube-api-access-7jfcm\") pod \"4f374320-accd-46d6-a286-3f2bac0a4180\" (UID: \"4f374320-accd-46d6-a286-3f2bac0a4180\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.199428 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f374320-accd-46d6-a286-3f2bac0a4180-kube-api-access-7jfcm" (OuterVolumeSpecName: "kube-api-access-7jfcm") pod "4f374320-accd-46d6-a286-3f2bac0a4180" (UID: "4f374320-accd-46d6-a286-3f2bac0a4180"). InnerVolumeSpecName "kube-api-access-7jfcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.202928 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4f374320-accd-46d6-a286-3f2bac0a4180" (UID: "4f374320-accd-46d6-a286-3f2bac0a4180"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.203431 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-ovndb-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.203476 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-internal-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.203557 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-config\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.203709 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-combined-ca-bundle\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.203989 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn9qj\" (UniqueName: \"kubernetes.io/projected/411b48fc-bafd-47a2-8bd6-c31e2132b09f-kube-api-access-jn9qj\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204072 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-public-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204242 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-httpd-config\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204351 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204366 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204378 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 
06:00:15.204388 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/031a2a5d-bba9-44a7-98b7-fe593eb22924-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204399 4810 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/031a2a5d-bba9-44a7-98b7-fe593eb22924-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204409 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jfcm\" (UniqueName: \"kubernetes.io/projected/4f374320-accd-46d6-a286-3f2bac0a4180-kube-api-access-7jfcm\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204432 4810 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204446 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrhb5\" (UniqueName: \"kubernetes.io/projected/031a2a5d-bba9-44a7-98b7-fe593eb22924-kube-api-access-zrhb5\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.204458 4810 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.205261 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4f374320-accd-46d6-a286-3f2bac0a4180" (UID: "4f374320-accd-46d6-a286-3f2bac0a4180"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.216233 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-combined-ca-bundle\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.218649 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-scripts" (OuterVolumeSpecName: "scripts") pod "4f374320-accd-46d6-a286-3f2bac0a4180" (UID: "4f374320-accd-46d6-a286-3f2bac0a4180"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.241752 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-public-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.258555 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-httpd-config\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.258763 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-config\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.258884 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-ovndb-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.259767 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/411b48fc-bafd-47a2-8bd6-c31e2132b09f-internal-tls-certs\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.294938 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn9qj\" (UniqueName: \"kubernetes.io/projected/411b48fc-bafd-47a2-8bd6-c31e2132b09f-kube-api-access-jn9qj\") pod \"neutron-74c9fd966f-8fc7k\" (UID: \"411b48fc-bafd-47a2-8bd6-c31e2132b09f\") " pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.306782 4810 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.306813 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.309656 4810 generic.go:334] "Generic (PLEG): container finished" podID="116ca285-75a8-411e-8be6-9f1f880c0576" containerID="b6209bd3d68a47fec30896eee705a702eafa978022a233c48ab306c62dd775e8" exitCode=0 Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.309826 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" event={"ID":"116ca285-75a8-411e-8be6-9f1f880c0576","Type":"ContainerDied","Data":"b6209bd3d68a47fec30896eee705a702eafa978022a233c48ab306c62dd775e8"} Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.336240 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-mmgx4" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.336585 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.337088 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mmgx4" event={"ID":"4f374320-accd-46d6-a286-3f2bac0a4180","Type":"ContainerDied","Data":"420ac0101f4453afb5ba838daace34d1ead5f9677dceb8c99c6752400eca2534"} Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.337189 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="420ac0101f4453afb5ba838daace34d1ead5f9677dceb8c99c6752400eca2534" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.341358 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-69cffb6c97-gskt7"] Dec 03 06:00:15 crc kubenswrapper[4810]: E1203 06:00:15.341956 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerName="glance-httpd" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.341970 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerName="glance-httpd" Dec 03 06:00:15 crc kubenswrapper[4810]: E1203 06:00:15.342001 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f374320-accd-46d6-a286-3f2bac0a4180" containerName="keystone-bootstrap" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.342008 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f374320-accd-46d6-a286-3f2bac0a4180" containerName="keystone-bootstrap" Dec 03 06:00:15 crc kubenswrapper[4810]: E1203 06:00:15.342031 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerName="glance-log" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.342036 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerName="glance-log" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.342242 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f374320-accd-46d6-a286-3f2bac0a4180" containerName="keystone-bootstrap" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.342263 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerName="glance-log" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.342276 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" containerName="glance-httpd" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.343033 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.348440 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.348461 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.349336 4810 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.357210 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" event={"ID":"6a6861b3-17c5-4195-bfc4-f51c356d8698","Type":"ContainerStarted","Data":"2bc029b3fad1048217b6af50d1c756b6f7a700b77bdf11a1b0c56da63b5c4483"} Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.358383 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.365507 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-69cffb6c97-gskt7"] Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.370365 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f374320-accd-46d6-a286-3f2bac0a4180" (UID: "4f374320-accd-46d6-a286-3f2bac0a4180"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.377241 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-config-data" (OuterVolumeSpecName: "config-data") pod "4f374320-accd-46d6-a286-3f2bac0a4180" (UID: "4f374320-accd-46d6-a286-3f2bac0a4180"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.384509 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"031a2a5d-bba9-44a7-98b7-fe593eb22924","Type":"ContainerDied","Data":"56530b4a7ffb0e00ea273762ed355d1fb1029bc7efcb23cc2c107adb65f61cbb"} Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.384562 4810 scope.go:117] "RemoveContainer" containerID="250b2caa20e3f5a42488285a0297f855ccc0e6eecdcbe45a7119b4694e4fddd9" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.384793 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.396482 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" podStartSLOduration=3.396464823 podStartE2EDuration="3.396464823s" podCreationTimestamp="2025-12-03 06:00:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:15.395536169 +0000 UTC m=+1139.330997030" watchObservedRunningTime="2025-12-03 06:00:15.396464823 +0000 UTC m=+1139.331925664" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.402096 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-nqvx4" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.410096 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.410132 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f374320-accd-46d6-a286-3f2bac0a4180-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.410149 4810 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.434721 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.458834 4810 scope.go:117] "RemoveContainer" containerID="49feebb18b5746ef49b2dc7f494e1fcd90caa53c28b4a3aef20b676bb4776a5d" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.486017 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.509049 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.514975 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-combined-ca-bundle\") pod \"33fa5884-f2c0-4391-a719-81c4d43605dc\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515117 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln7qj\" (UniqueName: \"kubernetes.io/projected/33fa5884-f2c0-4391-a719-81c4d43605dc-kube-api-access-ln7qj\") pod \"33fa5884-f2c0-4391-a719-81c4d43605dc\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515284 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-db-sync-config-data\") pod \"33fa5884-f2c0-4391-a719-81c4d43605dc\" (UID: \"33fa5884-f2c0-4391-a719-81c4d43605dc\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515597 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-scripts\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515632 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-config-data\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515701 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-public-tls-certs\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515763 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-combined-ca-bundle\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515809 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmp6x\" (UniqueName: \"kubernetes.io/projected/16e8ad53-6c83-4176-94d2-e37a0ff234e2-kube-api-access-rmp6x\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515867 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-credential-keys\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515901 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-fernet-keys\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.515919 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-internal-tls-certs\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.541502 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33fa5884-f2c0-4391-a719-81c4d43605dc-kube-api-access-ln7qj" (OuterVolumeSpecName: "kube-api-access-ln7qj") pod "33fa5884-f2c0-4391-a719-81c4d43605dc" (UID: "33fa5884-f2c0-4391-a719-81c4d43605dc"). InnerVolumeSpecName "kube-api-access-ln7qj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.541713 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "33fa5884-f2c0-4391-a719-81c4d43605dc" (UID: "33fa5884-f2c0-4391-a719-81c4d43605dc"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.547080 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:15 crc kubenswrapper[4810]: E1203 06:00:15.547662 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="116ca285-75a8-411e-8be6-9f1f880c0576" containerName="init" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.547723 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="116ca285-75a8-411e-8be6-9f1f880c0576" containerName="init" Dec 03 06:00:15 crc kubenswrapper[4810]: E1203 06:00:15.547803 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="116ca285-75a8-411e-8be6-9f1f880c0576" containerName="dnsmasq-dns" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.547864 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="116ca285-75a8-411e-8be6-9f1f880c0576" containerName="dnsmasq-dns" Dec 03 06:00:15 crc kubenswrapper[4810]: E1203 06:00:15.547941 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33fa5884-f2c0-4391-a719-81c4d43605dc" containerName="barbican-db-sync" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.548006 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="33fa5884-f2c0-4391-a719-81c4d43605dc" containerName="barbican-db-sync" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.548262 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="33fa5884-f2c0-4391-a719-81c4d43605dc" containerName="barbican-db-sync" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.548319 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="116ca285-75a8-411e-8be6-9f1f880c0576" containerName="dnsmasq-dns" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.549469 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.555392 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.555696 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.556341 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.574706 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33fa5884-f2c0-4391-a719-81c4d43605dc" (UID: "33fa5884-f2c0-4391-a719-81c4d43605dc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.617994 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-nb\") pod \"116ca285-75a8-411e-8be6-9f1f880c0576\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618086 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-sb\") pod \"116ca285-75a8-411e-8be6-9f1f880c0576\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618134 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-swift-storage-0\") pod \"116ca285-75a8-411e-8be6-9f1f880c0576\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618374 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-svc\") pod \"116ca285-75a8-411e-8be6-9f1f880c0576\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618418 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-config\") pod \"116ca285-75a8-411e-8be6-9f1f880c0576\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618496 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bmc9\" (UniqueName: \"kubernetes.io/projected/116ca285-75a8-411e-8be6-9f1f880c0576-kube-api-access-9bmc9\") pod \"116ca285-75a8-411e-8be6-9f1f880c0576\" (UID: \"116ca285-75a8-411e-8be6-9f1f880c0576\") " Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618801 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmp6x\" (UniqueName: \"kubernetes.io/projected/16e8ad53-6c83-4176-94d2-e37a0ff234e2-kube-api-access-rmp6x\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618851 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-credential-keys\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618870 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-fernet-keys\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618885 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-internal-tls-certs\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618954 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-scripts\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.618971 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-config-data\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.619014 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-public-tls-certs\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.619052 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-combined-ca-bundle\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.619126 4810 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.619140 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33fa5884-f2c0-4391-a719-81c4d43605dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.619150 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln7qj\" (UniqueName: \"kubernetes.io/projected/33fa5884-f2c0-4391-a719-81c4d43605dc-kube-api-access-ln7qj\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.627821 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-internal-tls-certs\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.630662 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/116ca285-75a8-411e-8be6-9f1f880c0576-kube-api-access-9bmc9" (OuterVolumeSpecName: "kube-api-access-9bmc9") pod "116ca285-75a8-411e-8be6-9f1f880c0576" (UID: "116ca285-75a8-411e-8be6-9f1f880c0576"). InnerVolumeSpecName "kube-api-access-9bmc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.643533 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-credential-keys\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.646137 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-scripts\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.652555 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-fernet-keys\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.660862 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-public-tls-certs\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.661015 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-combined-ca-bundle\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.661144 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16e8ad53-6c83-4176-94d2-e37a0ff234e2-config-data\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.664695 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmp6x\" (UniqueName: \"kubernetes.io/projected/16e8ad53-6c83-4176-94d2-e37a0ff234e2-kube-api-access-rmp6x\") pod \"keystone-69cffb6c97-gskt7\" (UID: \"16e8ad53-6c83-4176-94d2-e37a0ff234e2\") " pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.683014 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726460 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r42mf\" (UniqueName: \"kubernetes.io/projected/23b10432-e146-4a0a-a0d7-793e0dae69a2-kube-api-access-r42mf\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726519 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726547 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-logs\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726569 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726608 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726626 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726683 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726709 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.726791 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bmc9\" (UniqueName: \"kubernetes.io/projected/116ca285-75a8-411e-8be6-9f1f880c0576-kube-api-access-9bmc9\") on node 
\"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.736375 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "116ca285-75a8-411e-8be6-9f1f880c0576" (UID: "116ca285-75a8-411e-8be6-9f1f880c0576"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.762752 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "116ca285-75a8-411e-8be6-9f1f880c0576" (UID: "116ca285-75a8-411e-8be6-9f1f880c0576"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.773046 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "116ca285-75a8-411e-8be6-9f1f880c0576" (UID: "116ca285-75a8-411e-8be6-9f1f880c0576"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.779941 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "116ca285-75a8-411e-8be6-9f1f880c0576" (UID: "116ca285-75a8-411e-8be6-9f1f880c0576"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.795468 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-config" (OuterVolumeSpecName: "config") pod "116ca285-75a8-411e-8be6-9f1f880c0576" (UID: "116ca285-75a8-411e-8be6-9f1f880c0576"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.811659 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-67f9f779cb-gh9cv"] Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.828929 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r42mf\" (UniqueName: \"kubernetes.io/projected/23b10432-e146-4a0a-a0d7-793e0dae69a2-kube-api-access-r42mf\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829006 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829039 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-logs\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829072 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829111 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829128 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829188 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829209 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829266 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-nb\") on node \"crc\" DevicePath 
\"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829280 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829292 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829302 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829310 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/116ca285-75a8-411e-8be6-9f1f880c0576-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.829982 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.830788 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.831291 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-logs\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.835185 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.836259 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.850693 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r42mf\" (UniqueName: \"kubernetes.io/projected/23b10432-e146-4a0a-a0d7-793e0dae69a2-kube-api-access-r42mf\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.857015 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.872095 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.891007 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:00:15 crc kubenswrapper[4810]: I1203 06:00:15.927999 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:00:15 crc kubenswrapper[4810]: W1203 06:00:15.958840 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf98f609_ab0e_41f9_8f8a_72324c3ac333.slice/crio-77c38bbd51dc7f339a49421385b170d114e4c3b76ab2fc6b516a428a5b8ce786 WatchSource:0}: Error finding container 77c38bbd51dc7f339a49421385b170d114e4c3b76ab2fc6b516a428a5b8ce786: Status 404 returned error can't find the container with id 77c38bbd51dc7f339a49421385b170d114e4c3b76ab2fc6b516a428a5b8ce786 Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.063086 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-74c9fd966f-8fc7k"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.189404 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.285874 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-69cffb6c97-gskt7"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.427045 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="031a2a5d-bba9-44a7-98b7-fe593eb22924" path="/var/lib/kubelet/pods/031a2a5d-bba9-44a7-98b7-fe593eb22924/volumes" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.473653 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67f9f779cb-gh9cv" event={"ID":"770e7dec-064e-4641-a94b-78121261d7cd","Type":"ContainerStarted","Data":"02057553802b101eb66b59b19151daed4c108bf5017451eaf24e4d0862d36e7a"} Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.491034 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67f9f779cb-gh9cv" event={"ID":"770e7dec-064e-4641-a94b-78121261d7cd","Type":"ContainerStarted","Data":"b3a781fcf42893e315a2e995f6c7644982c179849ef51852e8161be5a5b0bdea"} Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.511515 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-nqvx4" event={"ID":"33fa5884-f2c0-4391-a719-81c4d43605dc","Type":"ContainerDied","Data":"67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846"} Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.511563 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.511840 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-nqvx4" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.532878 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-69cffb6c97-gskt7" event={"ID":"16e8ad53-6c83-4176-94d2-e37a0ff234e2","Type":"ContainerStarted","Data":"1295552d6be9de6af94526195c348f529889a3924cdf10384edb7fbef2b4429b"} Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.538908 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74c9fd966f-8fc7k" event={"ID":"411b48fc-bafd-47a2-8bd6-c31e2132b09f","Type":"ContainerStarted","Data":"066fddb3b9ad86b753cd9f99e9d8ef45308384db4aa791b6eaeaff001c0c8294"} Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.538948 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74c9fd966f-8fc7k" event={"ID":"411b48fc-bafd-47a2-8bd6-c31e2132b09f","Type":"ContainerStarted","Data":"4bf56ca1c9cb07007568488bfb0c993b33041b986edbee4b01d0a84888c01622"} Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.546125 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf98f609-ab0e-41f9-8f8a-72324c3ac333","Type":"ContainerStarted","Data":"77c38bbd51dc7f339a49421385b170d114e4c3b76ab2fc6b516a428a5b8ce786"} Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.557833 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.557951 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5584c78d67-wkldn" event={"ID":"116ca285-75a8-411e-8be6-9f1f880c0576","Type":"ContainerDied","Data":"5e4bec0350b16e855ede9d60ebb00cadc12123e7f4fb32000e9b81d4a18a02c3"} Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.558048 4810 scope.go:117] "RemoveContainer" containerID="b6209bd3d68a47fec30896eee705a702eafa978022a233c48ab306c62dd775e8" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.630386 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5584c78d67-wkldn"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.644441 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5584c78d67-wkldn"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.665592 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-856ff6b4bf-thl85"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.667890 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.676331 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-cnv4h" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.678098 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-config-data\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.678156 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98cjp\" (UniqueName: \"kubernetes.io/projected/9d889657-930a-4878-8727-91b0ab50723c-kube-api-access-98cjp\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.678184 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-config-data-custom\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.678236 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-combined-ca-bundle\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.678314 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d889657-930a-4878-8727-91b0ab50723c-logs\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 
06:00:16.679445 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.683080 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-856ff6b4bf-thl85"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.695657 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.753022 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-75bb559794-4gn48"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.761834 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.772216 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782055 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-config-data\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782117 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-config-data\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782161 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98cjp\" (UniqueName: \"kubernetes.io/projected/9d889657-930a-4878-8727-91b0ab50723c-kube-api-access-98cjp\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782185 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-config-data-custom\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782221 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3260c501-1348-49f4-8182-437086a5649e-logs\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782243 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-config-data-custom\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 
06:00:16.782281 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-combined-ca-bundle\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782304 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s79qr\" (UniqueName: \"kubernetes.io/projected/3260c501-1348-49f4-8182-437086a5649e-kube-api-access-s79qr\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782362 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-combined-ca-bundle\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.782398 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d889657-930a-4878-8727-91b0ab50723c-logs\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.783057 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d889657-930a-4878-8727-91b0ab50723c-logs\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.788886 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-75bb559794-4gn48"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.807107 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-config-data-custom\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.807992 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-config-data\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.809645 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d889657-930a-4878-8727-91b0ab50723c-combined-ca-bundle\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.842959 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98cjp\" (UniqueName: 
\"kubernetes.io/projected/9d889657-930a-4878-8727-91b0ab50723c-kube-api-access-98cjp\") pod \"barbican-worker-856ff6b4bf-thl85\" (UID: \"9d889657-930a-4878-8727-91b0ab50723c\") " pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.852948 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c9c8b6bc-hnh9h"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.878103 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d54bd6d5f-lrqmg"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.884459 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-config-data\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.884543 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3260c501-1348-49f4-8182-437086a5649e-logs\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.884570 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-config-data-custom\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.884617 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s79qr\" (UniqueName: \"kubernetes.io/projected/3260c501-1348-49f4-8182-437086a5649e-kube-api-access-s79qr\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.887527 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3260c501-1348-49f4-8182-437086a5649e-logs\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.894712 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-combined-ca-bundle\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.900578 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-combined-ca-bundle\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.905320 4810 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.911896 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-config-data\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.947706 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3260c501-1348-49f4-8182-437086a5649e-config-data-custom\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.956969 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d54bd6d5f-lrqmg"] Dec 03 06:00:16 crc kubenswrapper[4810]: I1203 06:00:16.964360 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s79qr\" (UniqueName: \"kubernetes.io/projected/3260c501-1348-49f4-8182-437086a5649e-kube-api-access-s79qr\") pod \"barbican-keystone-listener-75bb559794-4gn48\" (UID: \"3260c501-1348-49f4-8182-437086a5649e\") " pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:16.998533 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-856ff6b4bf-thl85" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.014518 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-sb\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.014596 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-nb\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.014763 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-config\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.014807 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc5pt\" (UniqueName: \"kubernetes.io/projected/25771020-a167-4b77-a3c9-ccc65dd8d6df-kube-api-access-wc5pt\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.014834 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-svc\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.014873 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-swift-storage-0\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.020549 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-75bb559794-4gn48" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.025122 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.067496 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6994c56bc4-rkwdw"] Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.086785 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.091316 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.102844 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6994c56bc4-rkwdw"] Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116278 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data-custom\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116340 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-sb\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116390 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-nb\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116483 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhsdf\" (UniqueName: \"kubernetes.io/projected/5c435fcb-cba8-43e8-b6ea-f273647f0264-kube-api-access-vhsdf\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116527 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data\") pod 
\"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116566 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c435fcb-cba8-43e8-b6ea-f273647f0264-logs\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116602 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-combined-ca-bundle\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116649 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-config\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116682 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc5pt\" (UniqueName: \"kubernetes.io/projected/25771020-a167-4b77-a3c9-ccc65dd8d6df-kube-api-access-wc5pt\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116703 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-svc\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.116750 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-swift-storage-0\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.117964 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-sb\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.118463 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-nb\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.120717 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-config\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " 
pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.121084 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-svc\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.121128 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-swift-storage-0\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.148823 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc5pt\" (UniqueName: \"kubernetes.io/projected/25771020-a167-4b77-a3c9-ccc65dd8d6df-kube-api-access-wc5pt\") pod \"dnsmasq-dns-d54bd6d5f-lrqmg\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.221021 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.221091 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c435fcb-cba8-43e8-b6ea-f273647f0264-logs\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.221123 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-combined-ca-bundle\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.221188 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data-custom\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.221258 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhsdf\" (UniqueName: \"kubernetes.io/projected/5c435fcb-cba8-43e8-b6ea-f273647f0264-kube-api-access-vhsdf\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.223953 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c435fcb-cba8-43e8-b6ea-f273647f0264-logs\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 
06:00:17.240928 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data-custom\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.242217 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-combined-ca-bundle\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.245365 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhsdf\" (UniqueName: \"kubernetes.io/projected/5c435fcb-cba8-43e8-b6ea-f273647f0264-kube-api-access-vhsdf\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.248982 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data\") pod \"barbican-api-6994c56bc4-rkwdw\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.343356 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.425072 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.570078 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf98f609-ab0e-41f9-8f8a-72324c3ac333","Type":"ContainerStarted","Data":"1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009"} Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.576932 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67f9f779cb-gh9cv" event={"ID":"770e7dec-064e-4641-a94b-78121261d7cd","Type":"ContainerStarted","Data":"b857e707c6cc4e48c6e8eb4b84c1cb6461679e7ad9a44041a18b4828977aff7c"} Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.577007 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.577023 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:17 crc kubenswrapper[4810]: I1203 06:00:17.612479 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-67f9f779cb-gh9cv" podStartSLOduration=3.612451836 podStartE2EDuration="3.612451836s" podCreationTimestamp="2025-12-03 06:00:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:17.596355834 +0000 UTC m=+1141.531816685" watchObservedRunningTime="2025-12-03 06:00:17.612451836 +0000 UTC m=+1141.547912677" Dec 03 06:00:18 crc kubenswrapper[4810]: I1203 06:00:18.390497 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="116ca285-75a8-411e-8be6-9f1f880c0576" path="/var/lib/kubelet/pods/116ca285-75a8-411e-8be6-9f1f880c0576/volumes" Dec 03 06:00:18 crc kubenswrapper[4810]: I1203 06:00:18.583372 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" podUID="6a6861b3-17c5-4195-bfc4-f51c356d8698" containerName="dnsmasq-dns" containerID="cri-o://2bc029b3fad1048217b6af50d1c756b6f7a700b77bdf11a1b0c56da63b5c4483" gracePeriod=10 Dec 03 06:00:18 crc kubenswrapper[4810]: I1203 06:00:18.861507 4810 scope.go:117] "RemoveContainer" containerID="b20fd1ad95bd91978406fbfd644d07a292e88577040e6216610eeb57260877a9" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.463722 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-c9578688-qf8tw"] Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.465585 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.468782 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.469940 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.493397 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-c9578688-qf8tw"] Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.597295 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvfzt\" (UniqueName: \"kubernetes.io/projected/3554de12-10d1-48a4-a17e-d5ce9955fa9c-kube-api-access-xvfzt\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.597393 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-combined-ca-bundle\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.597419 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-config-data\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.597440 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-internal-tls-certs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.597505 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-config-data-custom\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.597522 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3554de12-10d1-48a4-a17e-d5ce9955fa9c-logs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.597547 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-public-tls-certs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.606942 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"23b10432-e146-4a0a-a0d7-793e0dae69a2","Type":"ContainerStarted","Data":"d500def455109b885f461df76ccd4445175d06374a96d4c0c24eebb159eaef3e"} Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.619501 4810 generic.go:334] "Generic (PLEG): container finished" podID="6a6861b3-17c5-4195-bfc4-f51c356d8698" containerID="2bc029b3fad1048217b6af50d1c756b6f7a700b77bdf11a1b0c56da63b5c4483" exitCode=0 Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.619586 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" event={"ID":"6a6861b3-17c5-4195-bfc4-f51c356d8698","Type":"ContainerDied","Data":"2bc029b3fad1048217b6af50d1c756b6f7a700b77bdf11a1b0c56da63b5c4483"} Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.701103 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-config-data-custom\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.701159 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3554de12-10d1-48a4-a17e-d5ce9955fa9c-logs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.701205 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-public-tls-certs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.701799 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3554de12-10d1-48a4-a17e-d5ce9955fa9c-logs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.706905 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvfzt\" (UniqueName: \"kubernetes.io/projected/3554de12-10d1-48a4-a17e-d5ce9955fa9c-kube-api-access-xvfzt\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.707200 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-combined-ca-bundle\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.707260 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-config-data\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.707316 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-internal-tls-certs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.709911 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-config-data-custom\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.711568 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-internal-tls-certs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.713900 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-public-tls-certs\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.722711 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-combined-ca-bundle\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.726573 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvfzt\" (UniqueName: \"kubernetes.io/projected/3554de12-10d1-48a4-a17e-d5ce9955fa9c-kube-api-access-xvfzt\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.740914 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3554de12-10d1-48a4-a17e-d5ce9955fa9c-config-data\") pod \"barbican-api-c9578688-qf8tw\" (UID: \"3554de12-10d1-48a4-a17e-d5ce9955fa9c\") " pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:19 crc kubenswrapper[4810]: I1203 06:00:19.785650 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:21 crc kubenswrapper[4810]: I1203 06:00:21.924587 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.070032 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-swift-storage-0\") pod \"6a6861b3-17c5-4195-bfc4-f51c356d8698\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.070604 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2xdh\" (UniqueName: \"kubernetes.io/projected/6a6861b3-17c5-4195-bfc4-f51c356d8698-kube-api-access-r2xdh\") pod \"6a6861b3-17c5-4195-bfc4-f51c356d8698\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.070678 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-config\") pod \"6a6861b3-17c5-4195-bfc4-f51c356d8698\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.070786 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-svc\") pod \"6a6861b3-17c5-4195-bfc4-f51c356d8698\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.070890 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-sb\") pod \"6a6861b3-17c5-4195-bfc4-f51c356d8698\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.071001 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-nb\") pod \"6a6861b3-17c5-4195-bfc4-f51c356d8698\" (UID: \"6a6861b3-17c5-4195-bfc4-f51c356d8698\") " Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.078393 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a6861b3-17c5-4195-bfc4-f51c356d8698-kube-api-access-r2xdh" (OuterVolumeSpecName: "kube-api-access-r2xdh") pod "6a6861b3-17c5-4195-bfc4-f51c356d8698" (UID: "6a6861b3-17c5-4195-bfc4-f51c356d8698"). InnerVolumeSpecName "kube-api-access-r2xdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.185678 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2xdh\" (UniqueName: \"kubernetes.io/projected/6a6861b3-17c5-4195-bfc4-f51c356d8698-kube-api-access-r2xdh\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.190298 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6a6861b3-17c5-4195-bfc4-f51c356d8698" (UID: "6a6861b3-17c5-4195-bfc4-f51c356d8698"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.230104 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6a6861b3-17c5-4195-bfc4-f51c356d8698" (UID: "6a6861b3-17c5-4195-bfc4-f51c356d8698"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.241208 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6a6861b3-17c5-4195-bfc4-f51c356d8698" (UID: "6a6861b3-17c5-4195-bfc4-f51c356d8698"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.247292 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6a6861b3-17c5-4195-bfc4-f51c356d8698" (UID: "6a6861b3-17c5-4195-bfc4-f51c356d8698"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.262419 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-config" (OuterVolumeSpecName: "config") pod "6a6861b3-17c5-4195-bfc4-f51c356d8698" (UID: "6a6861b3-17c5-4195-bfc4-f51c356d8698"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.295250 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.295282 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.295293 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.295308 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.295318 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a6861b3-17c5-4195-bfc4-f51c356d8698-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.415439 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6994c56bc4-rkwdw"] Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.626815 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-75bb559794-4gn48"] Dec 03 06:00:22 crc kubenswrapper[4810]: W1203 06:00:22.641623 4810 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3260c501_1348_49f4_8182_437086a5649e.slice/crio-a5612e8f0b14d8301f5a2d64d75b0a2c321a1dc161072e756f733f9e154630b8 WatchSource:0}: Error finding container a5612e8f0b14d8301f5a2d64d75b0a2c321a1dc161072e756f733f9e154630b8: Status 404 returned error can't find the container with id a5612e8f0b14d8301f5a2d64d75b0a2c321a1dc161072e756f733f9e154630b8 Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.666069 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-856ff6b4bf-thl85"] Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.674000 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerStarted","Data":"b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520"} Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.676977 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-69cffb6c97-gskt7" event={"ID":"16e8ad53-6c83-4176-94d2-e37a0ff234e2","Type":"ContainerStarted","Data":"393b0b99f8c4050b17fdf899090c2385e17355589b482838adee727f7f26935d"} Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.678313 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.682720 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74c9fd966f-8fc7k" event={"ID":"411b48fc-bafd-47a2-8bd6-c31e2132b09f","Type":"ContainerStarted","Data":"b8e968b5a5a8a4e7bf4e834027d01550e3e49abe368c96caf743a3f9d8f27979"} Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.683464 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.688795 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6994c56bc4-rkwdw" event={"ID":"5c435fcb-cba8-43e8-b6ea-f273647f0264","Type":"ContainerStarted","Data":"4ccd99302a1f46586ab86d209716d01f69b3f0e9ed22a8d9fcf239c36a85fc7b"} Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.694483 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" event={"ID":"6a6861b3-17c5-4195-bfc4-f51c356d8698","Type":"ContainerDied","Data":"c72f6de2be0b8a1c282ed96972dd8d1df2e5c0003205c88e028f76d0824437af"} Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.694518 4810 scope.go:117] "RemoveContainer" containerID="2bc029b3fad1048217b6af50d1c756b6f7a700b77bdf11a1b0c56da63b5c4483" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.694619 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c9c8b6bc-hnh9h" Dec 03 06:00:22 crc kubenswrapper[4810]: W1203 06:00:22.708074 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d889657_930a_4878_8727_91b0ab50723c.slice/crio-68dd4d34d21a8070e278a0d57a3295c0dc036c86c14ef4d6e567278adff27879 WatchSource:0}: Error finding container 68dd4d34d21a8070e278a0d57a3295c0dc036c86c14ef4d6e567278adff27879: Status 404 returned error can't find the container with id 68dd4d34d21a8070e278a0d57a3295c0dc036c86c14ef4d6e567278adff27879 Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.718344 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-69cffb6c97-gskt7" podStartSLOduration=7.718318804 podStartE2EDuration="7.718318804s" podCreationTimestamp="2025-12-03 06:00:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:22.706973877 +0000 UTC m=+1146.642434718" watchObservedRunningTime="2025-12-03 06:00:22.718318804 +0000 UTC m=+1146.653779645" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.742420 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-74c9fd966f-8fc7k" podStartSLOduration=8.742392854 podStartE2EDuration="8.742392854s" podCreationTimestamp="2025-12-03 06:00:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:22.726408946 +0000 UTC m=+1146.661869787" watchObservedRunningTime="2025-12-03 06:00:22.742392854 +0000 UTC m=+1146.677853695" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.767093 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c9c8b6bc-hnh9h"] Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.774244 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c9c8b6bc-hnh9h"] Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.786890 4810 scope.go:117] "RemoveContainer" containerID="1c90f16c349e6679dfefbb87139b5e695c20fabf7c95029041afcf0796a8ce39" Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.860030 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d54bd6d5f-lrqmg"] Dec 03 06:00:22 crc kubenswrapper[4810]: I1203 06:00:22.949347 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-c9578688-qf8tw"] Dec 03 06:00:23 crc kubenswrapper[4810]: I1203 06:00:23.726170 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-856ff6b4bf-thl85" event={"ID":"9d889657-930a-4878-8727-91b0ab50723c","Type":"ContainerStarted","Data":"68dd4d34d21a8070e278a0d57a3295c0dc036c86c14ef4d6e567278adff27879"} Dec 03 06:00:23 crc kubenswrapper[4810]: I1203 06:00:23.727679 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-75bb559794-4gn48" event={"ID":"3260c501-1348-49f4-8182-437086a5649e","Type":"ContainerStarted","Data":"a5612e8f0b14d8301f5a2d64d75b0a2c321a1dc161072e756f733f9e154630b8"} Dec 03 06:00:23 crc kubenswrapper[4810]: I1203 06:00:23.730505 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6994c56bc4-rkwdw" event={"ID":"5c435fcb-cba8-43e8-b6ea-f273647f0264","Type":"ContainerStarted","Data":"4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c"} Dec 03 
06:00:23 crc kubenswrapper[4810]: I1203 06:00:23.733257 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" event={"ID":"25771020-a167-4b77-a3c9-ccc65dd8d6df","Type":"ContainerStarted","Data":"f02dce24c177969869bdb2cbe9abe1c0a1f0b3abd0c7fec4e36b165dd1b31ed1"} Dec 03 06:00:23 crc kubenswrapper[4810]: I1203 06:00:23.738196 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c9578688-qf8tw" event={"ID":"3554de12-10d1-48a4-a17e-d5ce9955fa9c","Type":"ContainerStarted","Data":"f645e66c5e81cb2ebb2516e9bc800bfa67bd32145efabee604dd108c9c50883f"} Dec 03 06:00:23 crc kubenswrapper[4810]: E1203 06:00:23.813346 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33fa5884_f2c0_4391_a719_81c4d43605dc.slice/crio-67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846\": RecentStats: unable to find data in memory cache]" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.400674 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a6861b3-17c5-4195-bfc4-f51c356d8698" path="/var/lib/kubelet/pods/6a6861b3-17c5-4195-bfc4-f51c356d8698/volumes" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.755648 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf98f609-ab0e-41f9-8f8a-72324c3ac333","Type":"ContainerStarted","Data":"6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd"} Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.762424 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-z4zwh" event={"ID":"36ce16c9-937b-4bdb-b5b1-09003d013c3a","Type":"ContainerStarted","Data":"faf1e5fdae941d52e0f432a48746ab6949f34b8a0329cf14d0f000796529f290"} Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.793096 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6994c56bc4-rkwdw" event={"ID":"5c435fcb-cba8-43e8-b6ea-f273647f0264","Type":"ContainerStarted","Data":"c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f"} Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.795071 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.795116 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.800664 4810 generic.go:334] "Generic (PLEG): container finished" podID="25771020-a167-4b77-a3c9-ccc65dd8d6df" containerID="3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804" exitCode=0 Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.800774 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" event={"ID":"25771020-a167-4b77-a3c9-ccc65dd8d6df","Type":"ContainerDied","Data":"3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804"} Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.803647 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=10.803618187 podStartE2EDuration="10.803618187s" podCreationTimestamp="2025-12-03 06:00:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-03 06:00:24.791978462 +0000 UTC m=+1148.727439323" watchObservedRunningTime="2025-12-03 06:00:24.803618187 +0000 UTC m=+1148.739079028" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.804682 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23b10432-e146-4a0a-a0d7-793e0dae69a2","Type":"ContainerStarted","Data":"7387ca5156bc4b58fbce2b30e2c7926eb0216ddca329561eb212067957565b08"} Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.808272 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c9578688-qf8tw" event={"ID":"3554de12-10d1-48a4-a17e-d5ce9955fa9c","Type":"ContainerStarted","Data":"c6caeea64a0d2647945a30bba08424b01f41d8851b51503e7cc66838fa474b41"} Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.817679 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-z4zwh" podStartSLOduration=7.891824877 podStartE2EDuration="46.817652764s" podCreationTimestamp="2025-12-03 05:59:38 +0000 UTC" firstStartedPulling="2025-12-03 05:59:43.211117325 +0000 UTC m=+1107.146578156" lastFinishedPulling="2025-12-03 06:00:22.136945202 +0000 UTC m=+1146.072406043" observedRunningTime="2025-12-03 06:00:24.814192564 +0000 UTC m=+1148.749653405" watchObservedRunningTime="2025-12-03 06:00:24.817652764 +0000 UTC m=+1148.753113615" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.875437 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6994c56bc4-rkwdw" podStartSLOduration=8.875406445 podStartE2EDuration="8.875406445s" podCreationTimestamp="2025-12-03 06:00:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:24.861633465 +0000 UTC m=+1148.797094326" watchObservedRunningTime="2025-12-03 06:00:24.875406445 +0000 UTC m=+1148.810867296" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.917057 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.917114 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.967022 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 06:00:24 crc kubenswrapper[4810]: I1203 06:00:24.972398 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.677970 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.678567 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.822682 4810 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" event={"ID":"25771020-a167-4b77-a3c9-ccc65dd8d6df","Type":"ContainerStarted","Data":"e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de"} Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.823062 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.841601 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c9578688-qf8tw" event={"ID":"3554de12-10d1-48a4-a17e-d5ce9955fa9c","Type":"ContainerStarted","Data":"81f5049c713556304cc0eb0c9c0ddf5a16fa2c46b2f795e062a593e749b3130c"} Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.841651 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.842527 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.842556 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.842566 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.857854 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" podStartSLOduration=9.857836121 podStartE2EDuration="9.857836121s" podCreationTimestamp="2025-12-03 06:00:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:25.857675447 +0000 UTC m=+1149.793136288" watchObservedRunningTime="2025-12-03 06:00:25.857836121 +0000 UTC m=+1149.793296962" Dec 03 06:00:25 crc kubenswrapper[4810]: I1203 06:00:25.903233 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-c9578688-qf8tw" podStartSLOduration=6.903205839 podStartE2EDuration="6.903205839s" podCreationTimestamp="2025-12-03 06:00:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:25.879321294 +0000 UTC m=+1149.814782135" watchObservedRunningTime="2025-12-03 06:00:25.903205839 +0000 UTC m=+1149.838666680" Dec 03 06:00:26 crc kubenswrapper[4810]: I1203 06:00:26.855800 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-75bb559794-4gn48" event={"ID":"3260c501-1348-49f4-8182-437086a5649e","Type":"ContainerStarted","Data":"be6035cc5d6ea5005c1bf0a1fbde17984f98648094db811aeb1730c9ea59907d"} Dec 03 06:00:26 crc kubenswrapper[4810]: I1203 06:00:26.862271 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23b10432-e146-4a0a-a0d7-793e0dae69a2","Type":"ContainerStarted","Data":"3d23e0239bf0b68767a533f36795e571d2ac50c11a04b4954b8f17883a84b3f4"} Dec 03 06:00:26 crc kubenswrapper[4810]: I1203 06:00:26.868438 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-856ff6b4bf-thl85" event={"ID":"9d889657-930a-4878-8727-91b0ab50723c","Type":"ContainerStarted","Data":"73e4aa52b7bf5fb7868e75aaec054b3c73d668fdc54386b686cf05b0da94c677"} Dec 03 06:00:26 crc 
kubenswrapper[4810]: I1203 06:00:26.895634 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=11.895605216 podStartE2EDuration="11.895605216s" podCreationTimestamp="2025-12-03 06:00:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:26.882857143 +0000 UTC m=+1150.818317994" watchObservedRunningTime="2025-12-03 06:00:26.895605216 +0000 UTC m=+1150.831066057" Dec 03 06:00:27 crc kubenswrapper[4810]: I1203 06:00:27.886276 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-856ff6b4bf-thl85" event={"ID":"9d889657-930a-4878-8727-91b0ab50723c","Type":"ContainerStarted","Data":"6c16911e53b704cf1e0b8e4e15945fb8c9b135c3756353b6b1a760736a84ca65"} Dec 03 06:00:27 crc kubenswrapper[4810]: I1203 06:00:27.890833 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 06:00:27 crc kubenswrapper[4810]: I1203 06:00:27.892130 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-75bb559794-4gn48" event={"ID":"3260c501-1348-49f4-8182-437086a5649e","Type":"ContainerStarted","Data":"d87cfab12615a40b1753acdafc7b8bce8957bd1fc7bb09f2ccdda17d5396b196"} Dec 03 06:00:27 crc kubenswrapper[4810]: I1203 06:00:27.919032 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-856ff6b4bf-thl85" podStartSLOduration=9.409557663 podStartE2EDuration="11.919007094s" podCreationTimestamp="2025-12-03 06:00:16 +0000 UTC" firstStartedPulling="2025-12-03 06:00:22.727211227 +0000 UTC m=+1146.662672058" lastFinishedPulling="2025-12-03 06:00:25.236660648 +0000 UTC m=+1149.172121489" observedRunningTime="2025-12-03 06:00:27.909724811 +0000 UTC m=+1151.845185692" watchObservedRunningTime="2025-12-03 06:00:27.919007094 +0000 UTC m=+1151.854467955" Dec 03 06:00:27 crc kubenswrapper[4810]: I1203 06:00:27.924135 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 06:00:27 crc kubenswrapper[4810]: I1203 06:00:27.943708 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-75bb559794-4gn48" podStartSLOduration=9.402268152 podStartE2EDuration="11.94368729s" podCreationTimestamp="2025-12-03 06:00:16 +0000 UTC" firstStartedPulling="2025-12-03 06:00:22.695053795 +0000 UTC m=+1146.630514636" lastFinishedPulling="2025-12-03 06:00:25.236472933 +0000 UTC m=+1149.171933774" observedRunningTime="2025-12-03 06:00:27.941987425 +0000 UTC m=+1151.877448306" watchObservedRunningTime="2025-12-03 06:00:27.94368729 +0000 UTC m=+1151.879148131" Dec 03 06:00:29 crc kubenswrapper[4810]: I1203 06:00:29.852498 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 06:00:30 crc kubenswrapper[4810]: I1203 06:00:30.934741 4810 generic.go:334] "Generic (PLEG): container finished" podID="36ce16c9-937b-4bdb-b5b1-09003d013c3a" containerID="faf1e5fdae941d52e0f432a48746ab6949f34b8a0329cf14d0f000796529f290" exitCode=0 Dec 03 06:00:30 crc kubenswrapper[4810]: I1203 06:00:30.934800 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-z4zwh" event={"ID":"36ce16c9-937b-4bdb-b5b1-09003d013c3a","Type":"ContainerDied","Data":"faf1e5fdae941d52e0f432a48746ab6949f34b8a0329cf14d0f000796529f290"} Dec 03 
06:00:31 crc kubenswrapper[4810]: I1203 06:00:31.140512 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.356949 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.472703 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f45869c47-p6bn8"] Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.473014 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" podUID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" containerName="dnsmasq-dns" containerID="cri-o://6b553e38d4f7db49c4d91fb17bfd327e691b96fb600c6f80ca38c15d963aafb3" gracePeriod=10 Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.692301 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-c9578688-qf8tw" Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.758714 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6994c56bc4-rkwdw"] Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.759011 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api-log" containerID="cri-o://4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c" gracePeriod=30 Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.759476 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api" containerID="cri-o://c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f" gracePeriod=30 Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.781809 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": EOF" Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.781866 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": EOF" Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.781821 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": EOF" Dec 03 06:00:32 crc kubenswrapper[4810]: I1203 06:00:32.781932 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": EOF" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.008368 4810 generic.go:334] "Generic (PLEG): container finished" podID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" containerID="6b553e38d4f7db49c4d91fb17bfd327e691b96fb600c6f80ca38c15d963aafb3" exitCode=0 Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 
06:00:33.009046 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" event={"ID":"fbca557f-95c4-460b-9d6d-8cd23b748c5b","Type":"ContainerDied","Data":"6b553e38d4f7db49c4d91fb17bfd327e691b96fb600c6f80ca38c15d963aafb3"} Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.020081 4810 generic.go:334] "Generic (PLEG): container finished" podID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerID="4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c" exitCode=143 Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.020165 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6994c56bc4-rkwdw" event={"ID":"5c435fcb-cba8-43e8-b6ea-f273647f0264","Type":"ContainerDied","Data":"4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c"} Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.469235 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-z4zwh" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.616637 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-scripts\") pod \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.616803 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s575h\" (UniqueName: \"kubernetes.io/projected/36ce16c9-937b-4bdb-b5b1-09003d013c3a-kube-api-access-s575h\") pod \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.616910 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-combined-ca-bundle\") pod \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.617023 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-config-data\") pod \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.617046 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-db-sync-config-data\") pod \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.617145 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ce16c9-937b-4bdb-b5b1-09003d013c3a-etc-machine-id\") pod \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\" (UID: \"36ce16c9-937b-4bdb-b5b1-09003d013c3a\") " Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.617676 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36ce16c9-937b-4bdb-b5b1-09003d013c3a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "36ce16c9-937b-4bdb-b5b1-09003d013c3a" (UID: "36ce16c9-937b-4bdb-b5b1-09003d013c3a"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.630020 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36ce16c9-937b-4bdb-b5b1-09003d013c3a-kube-api-access-s575h" (OuterVolumeSpecName: "kube-api-access-s575h") pod "36ce16c9-937b-4bdb-b5b1-09003d013c3a" (UID: "36ce16c9-937b-4bdb-b5b1-09003d013c3a"). InnerVolumeSpecName "kube-api-access-s575h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.650543 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "36ce16c9-937b-4bdb-b5b1-09003d013c3a" (UID: "36ce16c9-937b-4bdb-b5b1-09003d013c3a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.663091 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-scripts" (OuterVolumeSpecName: "scripts") pod "36ce16c9-937b-4bdb-b5b1-09003d013c3a" (UID: "36ce16c9-937b-4bdb-b5b1-09003d013c3a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.668078 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36ce16c9-937b-4bdb-b5b1-09003d013c3a" (UID: "36ce16c9-937b-4bdb-b5b1-09003d013c3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.692116 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-config-data" (OuterVolumeSpecName: "config-data") pod "36ce16c9-937b-4bdb-b5b1-09003d013c3a" (UID: "36ce16c9-937b-4bdb-b5b1-09003d013c3a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.720502 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.720537 4810 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.720550 4810 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ce16c9-937b-4bdb-b5b1-09003d013c3a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.720585 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.720594 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s575h\" (UniqueName: \"kubernetes.io/projected/36ce16c9-937b-4bdb-b5b1-09003d013c3a-kube-api-access-s575h\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.720603 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ce16c9-937b-4bdb-b5b1-09003d013c3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:33 crc kubenswrapper[4810]: I1203 06:00:33.960315 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.069591 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerStarted","Data":"ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a"} Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.069815 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="ceilometer-central-agent" containerID="cri-o://513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05" gracePeriod=30 Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.070163 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.070546 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="proxy-httpd" containerID="cri-o://ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a" gracePeriod=30 Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.070603 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="sg-core" containerID="cri-o://b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520" gracePeriod=30 Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.070642 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="ceilometer-notification-agent" containerID="cri-o://2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194" gracePeriod=30 Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.086041 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" event={"ID":"fbca557f-95c4-460b-9d6d-8cd23b748c5b","Type":"ContainerDied","Data":"5c72ee5fe6afc8b61f6ae61d63e7cc990aee14fc3ccb175346508ddf8d7cdc30"} Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.086761 4810 scope.go:117] "RemoveContainer" containerID="6b553e38d4f7db49c4d91fb17bfd327e691b96fb600c6f80ca38c15d963aafb3" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.087666 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f45869c47-p6bn8" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.123597 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-z4zwh" event={"ID":"36ce16c9-937b-4bdb-b5b1-09003d013c3a","Type":"ContainerDied","Data":"8d9452054e854722680526f55c1568028f035adec52559df7818e8f3fc4b2899"} Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.123675 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d9452054e854722680526f55c1568028f035adec52559df7818e8f3fc4b2899" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.123821 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-z4zwh" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.147704 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-swift-storage-0\") pod \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.147962 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-config\") pod \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.148047 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-sb\") pod \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.148081 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-nb\") pod \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.148198 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-svc\") pod \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.148237 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x84gw\" (UniqueName: \"kubernetes.io/projected/fbca557f-95c4-460b-9d6d-8cd23b748c5b-kube-api-access-x84gw\") pod \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\" (UID: \"fbca557f-95c4-460b-9d6d-8cd23b748c5b\") " Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.155625 4810 scope.go:117] "RemoveContainer" containerID="590cf4c70389880517781040d69583bd1f2ed1cfab67f26aba9717b0adc59daa" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.165695 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=5.416325865 podStartE2EDuration="56.165633261s" podCreationTimestamp="2025-12-03 05:59:38 +0000 UTC" firstStartedPulling="2025-12-03 05:59:42.724554664 +0000 UTC m=+1106.660015505" lastFinishedPulling="2025-12-03 06:00:33.47386206 +0000 UTC m=+1157.409322901" observedRunningTime="2025-12-03 06:00:34.109939394 +0000 UTC m=+1158.045400235" watchObservedRunningTime="2025-12-03 06:00:34.165633261 +0000 UTC m=+1158.101094112" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.179931 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbca557f-95c4-460b-9d6d-8cd23b748c5b-kube-api-access-x84gw" (OuterVolumeSpecName: "kube-api-access-x84gw") pod "fbca557f-95c4-460b-9d6d-8cd23b748c5b" (UID: "fbca557f-95c4-460b-9d6d-8cd23b748c5b"). InnerVolumeSpecName "kube-api-access-x84gw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:34 crc kubenswrapper[4810]: E1203 06:00:34.229380 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33fa5884_f2c0_4391_a719_81c4d43605dc.slice/crio-67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846\": RecentStats: unable to find data in memory cache]" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.250372 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x84gw\" (UniqueName: \"kubernetes.io/projected/fbca557f-95c4-460b-9d6d-8cd23b748c5b-kube-api-access-x84gw\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.252293 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fbca557f-95c4-460b-9d6d-8cd23b748c5b" (UID: "fbca557f-95c4-460b-9d6d-8cd23b748c5b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.271511 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fbca557f-95c4-460b-9d6d-8cd23b748c5b" (UID: "fbca557f-95c4-460b-9d6d-8cd23b748c5b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.271950 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fbca557f-95c4-460b-9d6d-8cd23b748c5b" (UID: "fbca557f-95c4-460b-9d6d-8cd23b748c5b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.275882 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fbca557f-95c4-460b-9d6d-8cd23b748c5b" (UID: "fbca557f-95c4-460b-9d6d-8cd23b748c5b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.279456 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-config" (OuterVolumeSpecName: "config") pod "fbca557f-95c4-460b-9d6d-8cd23b748c5b" (UID: "fbca557f-95c4-460b-9d6d-8cd23b748c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.352236 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.352276 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.352287 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.352296 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.352307 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbca557f-95c4-460b-9d6d-8cd23b748c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.444650 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f45869c47-p6bn8"] Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.451115 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f45869c47-p6bn8"] Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.856913 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:34 crc kubenswrapper[4810]: E1203 06:00:34.857415 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" containerName="dnsmasq-dns" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.857431 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" containerName="dnsmasq-dns" Dec 03 06:00:34 crc kubenswrapper[4810]: E1203 06:00:34.857456 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6861b3-17c5-4195-bfc4-f51c356d8698" containerName="dnsmasq-dns" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.857463 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6861b3-17c5-4195-bfc4-f51c356d8698" containerName="dnsmasq-dns" Dec 03 06:00:34 crc kubenswrapper[4810]: E1203 06:00:34.857494 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" containerName="init" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.857500 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" containerName="init" Dec 03 06:00:34 crc kubenswrapper[4810]: E1203 06:00:34.857508 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ce16c9-937b-4bdb-b5b1-09003d013c3a" containerName="cinder-db-sync" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.857514 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ce16c9-937b-4bdb-b5b1-09003d013c3a" containerName="cinder-db-sync" Dec 03 06:00:34 crc kubenswrapper[4810]: E1203 06:00:34.857531 4810 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6a6861b3-17c5-4195-bfc4-f51c356d8698" containerName="init" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.857536 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6861b3-17c5-4195-bfc4-f51c356d8698" containerName="init" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.857714 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" containerName="dnsmasq-dns" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.857725 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="36ce16c9-937b-4bdb-b5b1-09003d013c3a" containerName="cinder-db-sync" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.857749 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a6861b3-17c5-4195-bfc4-f51c356d8698" containerName="dnsmasq-dns" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.858819 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.861531 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.862059 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wb57f" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.862233 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.862887 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.888662 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.962498 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.963028 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.963071 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/117adca6-a79f-49a6-8ad2-715c4ec7debb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.963119 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-scripts\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.963158 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.963189 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dnxl\" (UniqueName: \"kubernetes.io/projected/117adca6-a79f-49a6-8ad2-715c4ec7debb-kube-api-access-4dnxl\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.965154 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9f47d9d6c-2s7rt"] Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.967287 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:34 crc kubenswrapper[4810]: I1203 06:00:34.976291 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9f47d9d6c-2s7rt"] Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.064827 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-sb\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.064913 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-swift-storage-0\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.064947 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-config\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.064974 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.064996 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.065030 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-nb\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.065050 
4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/117adca6-a79f-49a6-8ad2-715c4ec7debb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.065073 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-svc\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.065117 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-scripts\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.065142 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6tmp\" (UniqueName: \"kubernetes.io/projected/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-kube-api-access-h6tmp\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.065173 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.065195 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dnxl\" (UniqueName: \"kubernetes.io/projected/117adca6-a79f-49a6-8ad2-715c4ec7debb-kube-api-access-4dnxl\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.078052 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/117adca6-a79f-49a6-8ad2-715c4ec7debb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.084626 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.085439 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.093407 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-scripts\") 
pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.104213 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.130668 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dnxl\" (UniqueName: \"kubernetes.io/projected/117adca6-a79f-49a6-8ad2-715c4ec7debb-kube-api-access-4dnxl\") pod \"cinder-scheduler-0\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.179434 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-sb\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.179528 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-swift-storage-0\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.179563 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-config\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.179610 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-nb\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.179635 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-svc\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.179698 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6tmp\" (UniqueName: \"kubernetes.io/projected/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-kube-api-access-h6tmp\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.180652 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-sb\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc 
kubenswrapper[4810]: I1203 06:00:35.180930 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-swift-storage-0\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.181501 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-nb\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.181561 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-svc\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.181880 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-config\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.182117 4810 generic.go:334] "Generic (PLEG): container finished" podID="0881496b-922a-4333-a59d-3f953bcdd31d" containerID="ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a" exitCode=0 Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.182153 4810 generic.go:334] "Generic (PLEG): container finished" podID="0881496b-922a-4333-a59d-3f953bcdd31d" containerID="b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520" exitCode=2 Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.182179 4810 generic.go:334] "Generic (PLEG): container finished" podID="0881496b-922a-4333-a59d-3f953bcdd31d" containerID="513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05" exitCode=0 Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.182203 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerDied","Data":"ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a"} Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.182235 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerDied","Data":"b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520"} Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.182265 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerDied","Data":"513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05"} Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.197327 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.207551 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6tmp\" (UniqueName: \"kubernetes.io/projected/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-kube-api-access-h6tmp\") pod \"dnsmasq-dns-9f47d9d6c-2s7rt\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.302134 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.396268 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.418420 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.418911 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.446051 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.494661 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1931421c-a4c1-47fd-b4a0-dfec6d803de9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.501248 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data-custom\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.501317 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1931421c-a4c1-47fd-b4a0-dfec6d803de9-logs\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.501449 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.501507 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.501607 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-scripts\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.501636 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gsxf\" (UniqueName: \"kubernetes.io/projected/1931421c-a4c1-47fd-b4a0-dfec6d803de9-kube-api-access-9gsxf\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.603290 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data-custom\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.603372 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1931421c-a4c1-47fd-b4a0-dfec6d803de9-logs\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.603414 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.603439 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.603471 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-scripts\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.603494 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gsxf\" (UniqueName: \"kubernetes.io/projected/1931421c-a4c1-47fd-b4a0-dfec6d803de9-kube-api-access-9gsxf\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.603547 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1931421c-a4c1-47fd-b4a0-dfec6d803de9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.603645 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1931421c-a4c1-47fd-b4a0-dfec6d803de9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.605351 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1931421c-a4c1-47fd-b4a0-dfec6d803de9-logs\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.622294 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data-custom\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.623307 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-scripts\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.628112 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.637830 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.640514 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gsxf\" (UniqueName: \"kubernetes.io/projected/1931421c-a4c1-47fd-b4a0-dfec6d803de9-kube-api-access-9gsxf\") pod \"cinder-api-0\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " pod="openstack/cinder-api-0" Dec 03 06:00:35 crc kubenswrapper[4810]: I1203 06:00:35.766290 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.042938 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.088226 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9f47d9d6c-2s7rt"] Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.190210 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.190267 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.215003 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" event={"ID":"eb3328f9-d31a-4534-a685-7aa3b0aad3f2","Type":"ContainerStarted","Data":"5258da383fbef9388db06cf17f5d5d898bf8dd7a2a25721a551f2d3e50efceb5"} Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.228752 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"117adca6-a79f-49a6-8ad2-715c4ec7debb","Type":"ContainerStarted","Data":"c2645e7fa160fe134bb10623e88aa5a14f3701dcce2810e489d1891b8d0cb2dc"} Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.250170 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.268092 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.331841 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.441541 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbca557f-95c4-460b-9d6d-8cd23b748c5b" path="/var/lib/kubelet/pods/fbca557f-95c4-460b-9d6d-8cd23b748c5b/volumes" Dec 03 06:00:36 crc kubenswrapper[4810]: I1203 06:00:36.965841 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.059586 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-log-httpd\") pod \"0881496b-922a-4333-a59d-3f953bcdd31d\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.059659 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-scripts\") pod \"0881496b-922a-4333-a59d-3f953bcdd31d\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.059924 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-run-httpd\") pod \"0881496b-922a-4333-a59d-3f953bcdd31d\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.059959 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-combined-ca-bundle\") pod \"0881496b-922a-4333-a59d-3f953bcdd31d\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.059989 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-config-data\") pod \"0881496b-922a-4333-a59d-3f953bcdd31d\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.060022 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-sg-core-conf-yaml\") pod \"0881496b-922a-4333-a59d-3f953bcdd31d\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.060071 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtr7n\" (UniqueName: \"kubernetes.io/projected/0881496b-922a-4333-a59d-3f953bcdd31d-kube-api-access-gtr7n\") pod \"0881496b-922a-4333-a59d-3f953bcdd31d\" (UID: \"0881496b-922a-4333-a59d-3f953bcdd31d\") " Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.060879 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0881496b-922a-4333-a59d-3f953bcdd31d" (UID: "0881496b-922a-4333-a59d-3f953bcdd31d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.061276 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0881496b-922a-4333-a59d-3f953bcdd31d" (UID: "0881496b-922a-4333-a59d-3f953bcdd31d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.071106 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-scripts" (OuterVolumeSpecName: "scripts") pod "0881496b-922a-4333-a59d-3f953bcdd31d" (UID: "0881496b-922a-4333-a59d-3f953bcdd31d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.071126 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0881496b-922a-4333-a59d-3f953bcdd31d-kube-api-access-gtr7n" (OuterVolumeSpecName: "kube-api-access-gtr7n") pod "0881496b-922a-4333-a59d-3f953bcdd31d" (UID: "0881496b-922a-4333-a59d-3f953bcdd31d"). InnerVolumeSpecName "kube-api-access-gtr7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.124066 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0881496b-922a-4333-a59d-3f953bcdd31d" (UID: "0881496b-922a-4333-a59d-3f953bcdd31d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.162840 4810 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.163161 4810 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.163173 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtr7n\" (UniqueName: \"kubernetes.io/projected/0881496b-922a-4333-a59d-3f953bcdd31d-kube-api-access-gtr7n\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.163183 4810 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0881496b-922a-4333-a59d-3f953bcdd31d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.163192 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.176072 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0881496b-922a-4333-a59d-3f953bcdd31d" (UID: "0881496b-922a-4333-a59d-3f953bcdd31d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.225822 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-config-data" (OuterVolumeSpecName: "config-data") pod "0881496b-922a-4333-a59d-3f953bcdd31d" (UID: "0881496b-922a-4333-a59d-3f953bcdd31d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.247946 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1931421c-a4c1-47fd-b4a0-dfec6d803de9","Type":"ContainerStarted","Data":"6da0eda216e8ff522ec068cc38361b62149d2433856b5f5284348e64427a5dad"} Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.251441 4810 generic.go:334] "Generic (PLEG): container finished" podID="0881496b-922a-4333-a59d-3f953bcdd31d" containerID="2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194" exitCode=0 Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.251557 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.251617 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerDied","Data":"2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194"} Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.251687 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0881496b-922a-4333-a59d-3f953bcdd31d","Type":"ContainerDied","Data":"657b4604baead2eb5e04ef29e2b4230c6aa08f46b83217c40c4470b3303f5875"} Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.251718 4810 scope.go:117] "RemoveContainer" containerID="ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.255723 4810 generic.go:334] "Generic (PLEG): container finished" podID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" containerID="af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5" exitCode=0 Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.257492 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" event={"ID":"eb3328f9-d31a-4534-a685-7aa3b0aad3f2","Type":"ContainerDied","Data":"af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5"} Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.257605 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.257742 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.265351 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.265492 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0881496b-922a-4333-a59d-3f953bcdd31d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.302753 4810 scope.go:117] "RemoveContainer" containerID="b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.331196 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.399723 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.410146 4810 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:00:37 crc kubenswrapper[4810]: E1203 06:00:37.410792 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="proxy-httpd" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.410809 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="proxy-httpd" Dec 03 06:00:37 crc kubenswrapper[4810]: E1203 06:00:37.410833 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="ceilometer-central-agent" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.410838 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="ceilometer-central-agent" Dec 03 06:00:37 crc kubenswrapper[4810]: E1203 06:00:37.410865 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="ceilometer-notification-agent" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.410872 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="ceilometer-notification-agent" Dec 03 06:00:37 crc kubenswrapper[4810]: E1203 06:00:37.410880 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="sg-core" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.410887 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="sg-core" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.411110 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="ceilometer-notification-agent" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.411129 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="sg-core" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.411140 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="proxy-httpd" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.411165 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" containerName="ceilometer-central-agent" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.413235 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.420011 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.420609 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.423360 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.473345 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-run-httpd\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.473594 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.473678 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h847w\" (UniqueName: \"kubernetes.io/projected/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-kube-api-access-h847w\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.473792 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-log-httpd\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.474212 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.474497 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-config-data\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.474587 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-scripts\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.526946 4810 scope.go:117] "RemoveContainer" containerID="2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.577244 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.577295 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-config-data\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.577328 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-scripts\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.577373 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-run-httpd\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.577418 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.577442 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h847w\" (UniqueName: \"kubernetes.io/projected/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-kube-api-access-h847w\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.577478 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-log-httpd\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.579032 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-run-httpd\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.579748 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-log-httpd\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.586989 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-scripts\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.587943 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.590018 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-config-data\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.598920 4810 scope.go:117] "RemoveContainer" containerID="513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.605838 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.608438 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h847w\" (UniqueName: \"kubernetes.io/projected/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-kube-api-access-h847w\") pod \"ceilometer-0\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.638394 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.687837 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.703353 4810 scope.go:117] "RemoveContainer" containerID="ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a" Dec 03 06:00:37 crc kubenswrapper[4810]: E1203 06:00:37.704086 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a\": container with ID starting with ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a not found: ID does not exist" containerID="ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.704134 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a"} err="failed to get container status \"ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a\": rpc error: code = NotFound desc = could not find container \"ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a\": container with ID starting with ea7b17bc1444bc2552d7cbb7a98d8dbe68a3dfd3280cbf59d7c4979c0fce348a not found: ID does not exist" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.704164 4810 scope.go:117] "RemoveContainer" containerID="b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520" Dec 03 06:00:37 crc kubenswrapper[4810]: E1203 06:00:37.704548 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520\": container with ID starting with b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520 not found: ID does not exist" 
containerID="b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.704575 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520"} err="failed to get container status \"b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520\": rpc error: code = NotFound desc = could not find container \"b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520\": container with ID starting with b7012a2d6538cf6104af4ace2337d28c0a2822e4049cf19a2ac4857bf2856520 not found: ID does not exist" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.704591 4810 scope.go:117] "RemoveContainer" containerID="2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194" Dec 03 06:00:37 crc kubenswrapper[4810]: E1203 06:00:37.704886 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194\": container with ID starting with 2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194 not found: ID does not exist" containerID="2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.704992 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194"} err="failed to get container status \"2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194\": rpc error: code = NotFound desc = could not find container \"2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194\": container with ID starting with 2436980ad597b47be6a57a0bc5eeb6d096728d3d39cfb4208da7ea5372c84194 not found: ID does not exist" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.705475 4810 scope.go:117] "RemoveContainer" containerID="513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05" Dec 03 06:00:37 crc kubenswrapper[4810]: E1203 06:00:37.706166 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05\": container with ID starting with 513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05 not found: ID does not exist" containerID="513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.706216 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05"} err="failed to get container status \"513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05\": rpc error: code = NotFound desc = could not find container \"513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05\": container with ID starting with 513ebba9a327b764d999a98b4ce90b5b258be51c615a2dad61dee4878b16cd05 not found: ID does not exist" Dec 03 06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.882086 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 
06:00:37 crc kubenswrapper[4810]: I1203 06:00:37.883797 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 06:00:38 crc kubenswrapper[4810]: I1203 06:00:38.282630 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" event={"ID":"eb3328f9-d31a-4534-a685-7aa3b0aad3f2","Type":"ContainerStarted","Data":"e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e"} Dec 03 06:00:38 crc kubenswrapper[4810]: I1203 06:00:38.283550 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:38 crc kubenswrapper[4810]: I1203 06:00:38.291321 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"117adca6-a79f-49a6-8ad2-715c4ec7debb","Type":"ContainerStarted","Data":"de79c029f6fe985a630e4faab43d3cab5491e4b956e2717632accceb43f387f9"} Dec 03 06:00:38 crc kubenswrapper[4810]: I1203 06:00:38.293550 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1931421c-a4c1-47fd-b4a0-dfec6d803de9","Type":"ContainerStarted","Data":"6834d032e8428dcc5ea67c540e14c487a263bc7e9102facbf436ab14be87d36b"} Dec 03 06:00:38 crc kubenswrapper[4810]: I1203 06:00:38.316369 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" podStartSLOduration=4.316345257 podStartE2EDuration="4.316345257s" podCreationTimestamp="2025-12-03 06:00:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:38.311767037 +0000 UTC m=+1162.247227878" watchObservedRunningTime="2025-12-03 06:00:38.316345257 +0000 UTC m=+1162.251806098" Dec 03 06:00:38 crc kubenswrapper[4810]: I1203 06:00:38.407716 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0881496b-922a-4333-a59d-3f953bcdd31d" path="/var/lib/kubelet/pods/0881496b-922a-4333-a59d-3f953bcdd31d/volumes" Dec 03 06:00:38 crc kubenswrapper[4810]: I1203 06:00:38.447353 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.300755 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": read tcp 10.217.0.2:53048->10.217.0.154:9311: read: connection reset by peer" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.300791 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": read tcp 10.217.0.2:53036->10.217.0.154:9311: read: connection reset by peer" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.323785 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"117adca6-a79f-49a6-8ad2-715c4ec7debb","Type":"ContainerStarted","Data":"5c3996deb4ccf42c8b82ff764372ce82c13011b3068ae09ce97ac16a6167582a"} Dec 03 
06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.325764 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1931421c-a4c1-47fd-b4a0-dfec6d803de9","Type":"ContainerStarted","Data":"dbcbfee7463650f67ef1f74a2f1be71afdc5dbd9d179e0693c73eecf49694baa"} Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.325921 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerName="cinder-api-log" containerID="cri-o://6834d032e8428dcc5ea67c540e14c487a263bc7e9102facbf436ab14be87d36b" gracePeriod=30 Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.326024 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.326062 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerName="cinder-api" containerID="cri-o://dbcbfee7463650f67ef1f74a2f1be71afdc5dbd9d179e0693c73eecf49694baa" gracePeriod=30 Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.330791 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerStarted","Data":"20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff"} Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.330858 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerStarted","Data":"82d3d5d55a4b8a5c45b501bf19cb16da833880fe4b64b45444735501d396b3e7"} Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.330982 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.330998 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.343285 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.851456357 podStartE2EDuration="5.343266636s" podCreationTimestamp="2025-12-03 06:00:34 +0000 UTC" firstStartedPulling="2025-12-03 06:00:36.059976757 +0000 UTC m=+1159.995437598" lastFinishedPulling="2025-12-03 06:00:36.551787036 +0000 UTC m=+1160.487247877" observedRunningTime="2025-12-03 06:00:39.341075459 +0000 UTC m=+1163.276536300" watchObservedRunningTime="2025-12-03 06:00:39.343266636 +0000 UTC m=+1163.278727477" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.374893 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.374854363 podStartE2EDuration="4.374854363s" podCreationTimestamp="2025-12-03 06:00:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:39.369936654 +0000 UTC m=+1163.305397495" watchObservedRunningTime="2025-12-03 06:00:39.374854363 +0000 UTC m=+1163.310315204" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.607493 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.874807 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:39 crc kubenswrapper[4810]: I1203 06:00:39.925492 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.065133 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data-custom\") pod \"5c435fcb-cba8-43e8-b6ea-f273647f0264\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.065612 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c435fcb-cba8-43e8-b6ea-f273647f0264-logs\") pod \"5c435fcb-cba8-43e8-b6ea-f273647f0264\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.065658 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhsdf\" (UniqueName: \"kubernetes.io/projected/5c435fcb-cba8-43e8-b6ea-f273647f0264-kube-api-access-vhsdf\") pod \"5c435fcb-cba8-43e8-b6ea-f273647f0264\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.065696 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data\") pod \"5c435fcb-cba8-43e8-b6ea-f273647f0264\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.065773 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-combined-ca-bundle\") pod \"5c435fcb-cba8-43e8-b6ea-f273647f0264\" (UID: \"5c435fcb-cba8-43e8-b6ea-f273647f0264\") " Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.067042 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c435fcb-cba8-43e8-b6ea-f273647f0264-logs" (OuterVolumeSpecName: "logs") pod "5c435fcb-cba8-43e8-b6ea-f273647f0264" (UID: "5c435fcb-cba8-43e8-b6ea-f273647f0264"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.073382 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5c435fcb-cba8-43e8-b6ea-f273647f0264" (UID: "5c435fcb-cba8-43e8-b6ea-f273647f0264"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.097763 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c435fcb-cba8-43e8-b6ea-f273647f0264-kube-api-access-vhsdf" (OuterVolumeSpecName: "kube-api-access-vhsdf") pod "5c435fcb-cba8-43e8-b6ea-f273647f0264" (UID: "5c435fcb-cba8-43e8-b6ea-f273647f0264"). InnerVolumeSpecName "kube-api-access-vhsdf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.107944 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c435fcb-cba8-43e8-b6ea-f273647f0264" (UID: "5c435fcb-cba8-43e8-b6ea-f273647f0264"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.130378 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data" (OuterVolumeSpecName: "config-data") pod "5c435fcb-cba8-43e8-b6ea-f273647f0264" (UID: "5c435fcb-cba8-43e8-b6ea-f273647f0264"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.168206 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.168243 4810 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.168256 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c435fcb-cba8-43e8-b6ea-f273647f0264-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.168268 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhsdf\" (UniqueName: \"kubernetes.io/projected/5c435fcb-cba8-43e8-b6ea-f273647f0264-kube-api-access-vhsdf\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.168281 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c435fcb-cba8-43e8-b6ea-f273647f0264-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.197577 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.344488 4810 generic.go:334] "Generic (PLEG): container finished" podID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerID="6834d032e8428dcc5ea67c540e14c487a263bc7e9102facbf436ab14be87d36b" exitCode=143 Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.344578 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1931421c-a4c1-47fd-b4a0-dfec6d803de9","Type":"ContainerDied","Data":"6834d032e8428dcc5ea67c540e14c487a263bc7e9102facbf436ab14be87d36b"} Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.346363 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerStarted","Data":"de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8"} Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.351546 4810 generic.go:334] "Generic (PLEG): container finished" podID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerID="c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f" exitCode=0 Dec 
03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.353087 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.353214 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6994c56bc4-rkwdw" event={"ID":"5c435fcb-cba8-43e8-b6ea-f273647f0264","Type":"ContainerDied","Data":"c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f"} Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.353254 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6994c56bc4-rkwdw" event={"ID":"5c435fcb-cba8-43e8-b6ea-f273647f0264","Type":"ContainerDied","Data":"4ccd99302a1f46586ab86d209716d01f69b3f0e9ed22a8d9fcf239c36a85fc7b"} Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.353303 4810 scope.go:117] "RemoveContainer" containerID="c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.463578 4810 scope.go:117] "RemoveContainer" containerID="4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.492164 4810 scope.go:117] "RemoveContainer" containerID="c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f" Dec 03 06:00:40 crc kubenswrapper[4810]: E1203 06:00:40.493128 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f\": container with ID starting with c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f not found: ID does not exist" containerID="c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.493161 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f"} err="failed to get container status \"c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f\": rpc error: code = NotFound desc = could not find container \"c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f\": container with ID starting with c382fe15e206e59e7c56ff344a9e21e236f36a5c3546c625a968192d89b4bc7f not found: ID does not exist" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.493187 4810 scope.go:117] "RemoveContainer" containerID="4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c" Dec 03 06:00:40 crc kubenswrapper[4810]: E1203 06:00:40.495632 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c\": container with ID starting with 4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c not found: ID does not exist" containerID="4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c" Dec 03 06:00:40 crc kubenswrapper[4810]: I1203 06:00:40.495689 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c"} err="failed to get container status \"4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c\": rpc error: code = NotFound desc = could not find container \"4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c\": container with ID starting with 
4a0824a0c6d3da6b5f0a165d5eacb48dbf6885179131bec5ac423b052029b40c not found: ID does not exist" Dec 03 06:00:41 crc kubenswrapper[4810]: I1203 06:00:41.369363 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerStarted","Data":"635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb"} Dec 03 06:00:42 crc kubenswrapper[4810]: I1203 06:00:42.389633 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerStarted","Data":"8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307"} Dec 03 06:00:42 crc kubenswrapper[4810]: I1203 06:00:42.390927 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 06:00:42 crc kubenswrapper[4810]: I1203 06:00:42.419048 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.334137008 podStartE2EDuration="5.419017675s" podCreationTimestamp="2025-12-03 06:00:37 +0000 UTC" firstStartedPulling="2025-12-03 06:00:38.482584707 +0000 UTC m=+1162.418045538" lastFinishedPulling="2025-12-03 06:00:41.567465374 +0000 UTC m=+1165.502926205" observedRunningTime="2025-12-03 06:00:42.410244975 +0000 UTC m=+1166.345705856" watchObservedRunningTime="2025-12-03 06:00:42.419017675 +0000 UTC m=+1166.354478546" Dec 03 06:00:42 crc kubenswrapper[4810]: I1203 06:00:42.503119 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:44 crc kubenswrapper[4810]: E1203 06:00:44.534172 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33fa5884_f2c0_4391_a719_81c4d43605dc.slice/crio-67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846\": RecentStats: unable to find data in memory cache]" Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.304932 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.376672 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d54bd6d5f-lrqmg"] Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.376937 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" podUID="25771020-a167-4b77-a3c9-ccc65dd8d6df" containerName="dnsmasq-dns" containerID="cri-o://e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de" gracePeriod=10 Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.391019 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-74c9fd966f-8fc7k" Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.481921 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76d8d4696d-45zhd"] Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.486685 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76d8d4696d-45zhd" podUID="c65e15df-594b-4292-b784-a8586fbec721" containerName="neutron-api" containerID="cri-o://fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11" gracePeriod=30 Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.487263 4810 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/neutron-76d8d4696d-45zhd" podUID="c65e15df-594b-4292-b784-a8586fbec721" containerName="neutron-httpd" containerID="cri-o://1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3" gracePeriod=30 Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.669885 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.765576 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.857721 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.927058 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-67f9f779cb-gh9cv" Dec 03 06:00:45 crc kubenswrapper[4810]: I1203 06:00:45.980848 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.105630 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-config\") pod \"25771020-a167-4b77-a3c9-ccc65dd8d6df\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.105800 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-nb\") pod \"25771020-a167-4b77-a3c9-ccc65dd8d6df\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.105957 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-swift-storage-0\") pod \"25771020-a167-4b77-a3c9-ccc65dd8d6df\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.106048 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-svc\") pod \"25771020-a167-4b77-a3c9-ccc65dd8d6df\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.106114 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-sb\") pod \"25771020-a167-4b77-a3c9-ccc65dd8d6df\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.106173 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc5pt\" (UniqueName: \"kubernetes.io/projected/25771020-a167-4b77-a3c9-ccc65dd8d6df-kube-api-access-wc5pt\") pod \"25771020-a167-4b77-a3c9-ccc65dd8d6df\" (UID: \"25771020-a167-4b77-a3c9-ccc65dd8d6df\") " Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.150115 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25771020-a167-4b77-a3c9-ccc65dd8d6df-kube-api-access-wc5pt" (OuterVolumeSpecName: "kube-api-access-wc5pt") pod 
"25771020-a167-4b77-a3c9-ccc65dd8d6df" (UID: "25771020-a167-4b77-a3c9-ccc65dd8d6df"). InnerVolumeSpecName "kube-api-access-wc5pt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.192148 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "25771020-a167-4b77-a3c9-ccc65dd8d6df" (UID: "25771020-a167-4b77-a3c9-ccc65dd8d6df"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.192403 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "25771020-a167-4b77-a3c9-ccc65dd8d6df" (UID: "25771020-a167-4b77-a3c9-ccc65dd8d6df"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.204423 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "25771020-a167-4b77-a3c9-ccc65dd8d6df" (UID: "25771020-a167-4b77-a3c9-ccc65dd8d6df"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.217377 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.217428 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.217445 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.217459 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc5pt\" (UniqueName: \"kubernetes.io/projected/25771020-a167-4b77-a3c9-ccc65dd8d6df-kube-api-access-wc5pt\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.237823 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-config" (OuterVolumeSpecName: "config") pod "25771020-a167-4b77-a3c9-ccc65dd8d6df" (UID: "25771020-a167-4b77-a3c9-ccc65dd8d6df"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.259342 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "25771020-a167-4b77-a3c9-ccc65dd8d6df" (UID: "25771020-a167-4b77-a3c9-ccc65dd8d6df"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.321321 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.321382 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25771020-a167-4b77-a3c9-ccc65dd8d6df-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.455959 4810 generic.go:334] "Generic (PLEG): container finished" podID="25771020-a167-4b77-a3c9-ccc65dd8d6df" containerID="e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de" exitCode=0 Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.456036 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.456025 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" event={"ID":"25771020-a167-4b77-a3c9-ccc65dd8d6df","Type":"ContainerDied","Data":"e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de"} Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.456119 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d54bd6d5f-lrqmg" event={"ID":"25771020-a167-4b77-a3c9-ccc65dd8d6df","Type":"ContainerDied","Data":"f02dce24c177969869bdb2cbe9abe1c0a1f0b3abd0c7fec4e36b165dd1b31ed1"} Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.456162 4810 scope.go:117] "RemoveContainer" containerID="e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.458251 4810 generic.go:334] "Generic (PLEG): container finished" podID="c65e15df-594b-4292-b784-a8586fbec721" containerID="1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3" exitCode=0 Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.458668 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d8d4696d-45zhd" event={"ID":"c65e15df-594b-4292-b784-a8586fbec721","Type":"ContainerDied","Data":"1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3"} Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.458923 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerName="cinder-scheduler" containerID="cri-o://de79c029f6fe985a630e4faab43d3cab5491e4b956e2717632accceb43f387f9" gracePeriod=30 Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.459226 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerName="probe" containerID="cri-o://5c3996deb4ccf42c8b82ff764372ce82c13011b3068ae09ce97ac16a6167582a" gracePeriod=30 Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.492064 4810 scope.go:117] "RemoveContainer" containerID="3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.507633 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d54bd6d5f-lrqmg"] Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.516354 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/dnsmasq-dns-d54bd6d5f-lrqmg"] Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.561544 4810 scope.go:117] "RemoveContainer" containerID="e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de" Dec 03 06:00:46 crc kubenswrapper[4810]: E1203 06:00:46.566068 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de\": container with ID starting with e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de not found: ID does not exist" containerID="e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.566134 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de"} err="failed to get container status \"e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de\": rpc error: code = NotFound desc = could not find container \"e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de\": container with ID starting with e1a68723d018623eca0c9de1a51100bf311a1c5d2a06cbe8c2ad243e7458c1de not found: ID does not exist" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.566164 4810 scope.go:117] "RemoveContainer" containerID="3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804" Dec 03 06:00:46 crc kubenswrapper[4810]: E1203 06:00:46.570188 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804\": container with ID starting with 3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804 not found: ID does not exist" containerID="3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804" Dec 03 06:00:46 crc kubenswrapper[4810]: I1203 06:00:46.570226 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804"} err="failed to get container status \"3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804\": rpc error: code = NotFound desc = could not find container \"3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804\": container with ID starting with 3e0e216bf10999e701e7d94263db32e7ce393189803ebe60f133c060f7351804 not found: ID does not exist" Dec 03 06:00:47 crc kubenswrapper[4810]: I1203 06:00:47.483588 4810 generic.go:334] "Generic (PLEG): container finished" podID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerID="5c3996deb4ccf42c8b82ff764372ce82c13011b3068ae09ce97ac16a6167582a" exitCode=0 Dec 03 06:00:47 crc kubenswrapper[4810]: I1203 06:00:47.483933 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"117adca6-a79f-49a6-8ad2-715c4ec7debb","Type":"ContainerDied","Data":"5c3996deb4ccf42c8b82ff764372ce82c13011b3068ae09ce97ac16a6167582a"} Dec 03 06:00:48 crc kubenswrapper[4810]: I1203 06:00:48.389056 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25771020-a167-4b77-a3c9-ccc65dd8d6df" path="/var/lib/kubelet/pods/25771020-a167-4b77-a3c9-ccc65dd8d6df/volumes" Dec 03 06:00:48 crc kubenswrapper[4810]: I1203 06:00:48.431787 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 03 06:00:48 crc kubenswrapper[4810]: I1203 
06:00:48.472267 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-69cffb6c97-gskt7" Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.512016 4810 generic.go:334] "Generic (PLEG): container finished" podID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerID="de79c029f6fe985a630e4faab43d3cab5491e4b956e2717632accceb43f387f9" exitCode=0 Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.512089 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"117adca6-a79f-49a6-8ad2-715c4ec7debb","Type":"ContainerDied","Data":"de79c029f6fe985a630e4faab43d3cab5491e4b956e2717632accceb43f387f9"} Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.825573 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.899687 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-combined-ca-bundle\") pod \"117adca6-a79f-49a6-8ad2-715c4ec7debb\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.899793 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/117adca6-a79f-49a6-8ad2-715c4ec7debb-etc-machine-id\") pod \"117adca6-a79f-49a6-8ad2-715c4ec7debb\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.899887 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data\") pod \"117adca6-a79f-49a6-8ad2-715c4ec7debb\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.899908 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dnxl\" (UniqueName: \"kubernetes.io/projected/117adca6-a79f-49a6-8ad2-715c4ec7debb-kube-api-access-4dnxl\") pod \"117adca6-a79f-49a6-8ad2-715c4ec7debb\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.900044 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data-custom\") pod \"117adca6-a79f-49a6-8ad2-715c4ec7debb\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.900095 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-scripts\") pod \"117adca6-a79f-49a6-8ad2-715c4ec7debb\" (UID: \"117adca6-a79f-49a6-8ad2-715c4ec7debb\") " Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.904373 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/117adca6-a79f-49a6-8ad2-715c4ec7debb-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "117adca6-a79f-49a6-8ad2-715c4ec7debb" (UID: "117adca6-a79f-49a6-8ad2-715c4ec7debb"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.913863 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-scripts" (OuterVolumeSpecName: "scripts") pod "117adca6-a79f-49a6-8ad2-715c4ec7debb" (UID: "117adca6-a79f-49a6-8ad2-715c4ec7debb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.931157 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "117adca6-a79f-49a6-8ad2-715c4ec7debb" (UID: "117adca6-a79f-49a6-8ad2-715c4ec7debb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:49 crc kubenswrapper[4810]: I1203 06:00:49.941268 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/117adca6-a79f-49a6-8ad2-715c4ec7debb-kube-api-access-4dnxl" (OuterVolumeSpecName: "kube-api-access-4dnxl") pod "117adca6-a79f-49a6-8ad2-715c4ec7debb" (UID: "117adca6-a79f-49a6-8ad2-715c4ec7debb"). InnerVolumeSpecName "kube-api-access-4dnxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.007054 4810 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.007087 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.007098 4810 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/117adca6-a79f-49a6-8ad2-715c4ec7debb-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.007112 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dnxl\" (UniqueName: \"kubernetes.io/projected/117adca6-a79f-49a6-8ad2-715c4ec7debb-kube-api-access-4dnxl\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.020214 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "117adca6-a79f-49a6-8ad2-715c4ec7debb" (UID: "117adca6-a79f-49a6-8ad2-715c4ec7debb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.086754 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data" (OuterVolumeSpecName: "config-data") pod "117adca6-a79f-49a6-8ad2-715c4ec7debb" (UID: "117adca6-a79f-49a6-8ad2-715c4ec7debb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.108384 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.108413 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117adca6-a79f-49a6-8ad2-715c4ec7debb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.524017 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"117adca6-a79f-49a6-8ad2-715c4ec7debb","Type":"ContainerDied","Data":"c2645e7fa160fe134bb10623e88aa5a14f3701dcce2810e489d1891b8d0cb2dc"} Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.524087 4810 scope.go:117] "RemoveContainer" containerID="5c3996deb4ccf42c8b82ff764372ce82c13011b3068ae09ce97ac16a6167582a" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.524247 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.562361 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.563677 4810 scope.go:117] "RemoveContainer" containerID="de79c029f6fe985a630e4faab43d3cab5491e4b956e2717632accceb43f387f9" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.569031 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.596177 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:50 crc kubenswrapper[4810]: E1203 06:00:50.596787 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.596810 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api" Dec 03 06:00:50 crc kubenswrapper[4810]: E1203 06:00:50.596826 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerName="cinder-scheduler" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.596834 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerName="cinder-scheduler" Dec 03 06:00:50 crc kubenswrapper[4810]: E1203 06:00:50.596862 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25771020-a167-4b77-a3c9-ccc65dd8d6df" containerName="init" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.596870 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="25771020-a167-4b77-a3c9-ccc65dd8d6df" containerName="init" Dec 03 06:00:50 crc kubenswrapper[4810]: E1203 06:00:50.596885 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api-log" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.596894 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api-log" Dec 03 06:00:50 crc kubenswrapper[4810]: E1203 06:00:50.596923 4810 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerName="probe" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.596930 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerName="probe" Dec 03 06:00:50 crc kubenswrapper[4810]: E1203 06:00:50.596937 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25771020-a167-4b77-a3c9-ccc65dd8d6df" containerName="dnsmasq-dns" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.596944 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="25771020-a167-4b77-a3c9-ccc65dd8d6df" containerName="dnsmasq-dns" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.597198 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api-log" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.597219 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerName="probe" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.597233 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="25771020-a167-4b77-a3c9-ccc65dd8d6df" containerName="dnsmasq-dns" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.597247 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" containerName="barbican-api" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.597258 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" containerName="cinder-scheduler" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.598400 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.609583 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.628666 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.724538 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2h87\" (UniqueName: \"kubernetes.io/projected/d2130a28-40ad-4938-a265-8114fbcf38a1-kube-api-access-d2h87\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.724597 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.724628 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-scripts\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.725027 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-config-data\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.725083 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d2130a28-40ad-4938-a265-8114fbcf38a1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.725460 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.827194 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.827518 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2h87\" (UniqueName: \"kubernetes.io/projected/d2130a28-40ad-4938-a265-8114fbcf38a1-kube-api-access-d2h87\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.827543 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.827565 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-scripts\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.827627 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-config-data\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.827646 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d2130a28-40ad-4938-a265-8114fbcf38a1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.827749 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d2130a28-40ad-4938-a265-8114fbcf38a1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 
06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.832674 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-scripts\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.832974 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.834143 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-config-data\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.841636 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2130a28-40ad-4938-a265-8114fbcf38a1-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.858662 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2h87\" (UniqueName: \"kubernetes.io/projected/d2130a28-40ad-4938-a265-8114fbcf38a1-kube-api-access-d2h87\") pod \"cinder-scheduler-0\" (UID: \"d2130a28-40ad-4938-a265-8114fbcf38a1\") " pod="openstack/cinder-scheduler-0" Dec 03 06:00:50 crc kubenswrapper[4810]: I1203 06:00:50.925385 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 06:00:51 crc kubenswrapper[4810]: W1203 06:00:51.491819 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2130a28_40ad_4938_a265_8114fbcf38a1.slice/crio-0ae4ed0c73e4ac936b0471a85c92556bf64479b1eaadf9f4d0ad2248b53664a0 WatchSource:0}: Error finding container 0ae4ed0c73e4ac936b0471a85c92556bf64479b1eaadf9f4d0ad2248b53664a0: Status 404 returned error can't find the container with id 0ae4ed0c73e4ac936b0471a85c92556bf64479b1eaadf9f4d0ad2248b53664a0 Dec 03 06:00:51 crc kubenswrapper[4810]: I1203 06:00:51.494683 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 06:00:51 crc kubenswrapper[4810]: I1203 06:00:51.539291 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d2130a28-40ad-4938-a265-8114fbcf38a1","Type":"ContainerStarted","Data":"0ae4ed0c73e4ac936b0471a85c92556bf64479b1eaadf9f4d0ad2248b53664a0"} Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.017149 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.019063 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.022238 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.022662 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-jnhj2" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.023366 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.072134 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.185168 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db524b8c-98e9-41bf-be3f-5376226012e4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.185258 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db524b8c-98e9-41bf-be3f-5376226012e4-openstack-config\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.185292 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldfs7\" (UniqueName: \"kubernetes.io/projected/db524b8c-98e9-41bf-be3f-5376226012e4-kube-api-access-ldfs7\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.185396 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db524b8c-98e9-41bf-be3f-5376226012e4-openstack-config-secret\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.288233 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db524b8c-98e9-41bf-be3f-5376226012e4-openstack-config-secret\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.288772 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db524b8c-98e9-41bf-be3f-5376226012e4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.288840 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db524b8c-98e9-41bf-be3f-5376226012e4-openstack-config\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.288865 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-ldfs7\" (UniqueName: \"kubernetes.io/projected/db524b8c-98e9-41bf-be3f-5376226012e4-kube-api-access-ldfs7\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.290390 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db524b8c-98e9-41bf-be3f-5376226012e4-openstack-config\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.298199 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db524b8c-98e9-41bf-be3f-5376226012e4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.301289 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db524b8c-98e9-41bf-be3f-5376226012e4-openstack-config-secret\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.318118 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldfs7\" (UniqueName: \"kubernetes.io/projected/db524b8c-98e9-41bf-be3f-5376226012e4-kube-api-access-ldfs7\") pod \"openstackclient\" (UID: \"db524b8c-98e9-41bf-be3f-5376226012e4\") " pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.355326 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.397833 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="117adca6-a79f-49a6-8ad2-715c4ec7debb" path="/var/lib/kubelet/pods/117adca6-a79f-49a6-8ad2-715c4ec7debb/volumes" Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.575117 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d2130a28-40ad-4938-a265-8114fbcf38a1","Type":"ContainerStarted","Data":"770f42dd225cfed2f7ae59f6e52813ef97ced989851f4eb65a512933f21b909c"} Dec 03 06:00:52 crc kubenswrapper[4810]: I1203 06:00:52.735795 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 06:00:52 crc kubenswrapper[4810]: W1203 06:00:52.740703 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb524b8c_98e9_41bf_be3f_5376226012e4.slice/crio-ce18e4c87697f56ca6219af97fe72c3079aff2b330c3a7f9d1f190c25a5b8302 WatchSource:0}: Error finding container ce18e4c87697f56ca6219af97fe72c3079aff2b330c3a7f9d1f190c25a5b8302: Status 404 returned error can't find the container with id ce18e4c87697f56ca6219af97fe72c3079aff2b330c3a7f9d1f190c25a5b8302 Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.188793 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.317362 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-config\") pod \"c65e15df-594b-4292-b784-a8586fbec721\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.317434 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-httpd-config\") pod \"c65e15df-594b-4292-b784-a8586fbec721\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.317552 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-ovndb-tls-certs\") pod \"c65e15df-594b-4292-b784-a8586fbec721\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.317669 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-combined-ca-bundle\") pod \"c65e15df-594b-4292-b784-a8586fbec721\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.317703 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jwtw\" (UniqueName: \"kubernetes.io/projected/c65e15df-594b-4292-b784-a8586fbec721-kube-api-access-6jwtw\") pod \"c65e15df-594b-4292-b784-a8586fbec721\" (UID: \"c65e15df-594b-4292-b784-a8586fbec721\") " Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.324042 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "c65e15df-594b-4292-b784-a8586fbec721" (UID: "c65e15df-594b-4292-b784-a8586fbec721"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.330038 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c65e15df-594b-4292-b784-a8586fbec721-kube-api-access-6jwtw" (OuterVolumeSpecName: "kube-api-access-6jwtw") pod "c65e15df-594b-4292-b784-a8586fbec721" (UID: "c65e15df-594b-4292-b784-a8586fbec721"). InnerVolumeSpecName "kube-api-access-6jwtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.387977 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-config" (OuterVolumeSpecName: "config") pod "c65e15df-594b-4292-b784-a8586fbec721" (UID: "c65e15df-594b-4292-b784-a8586fbec721"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.419870 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c65e15df-594b-4292-b784-a8586fbec721" (UID: "c65e15df-594b-4292-b784-a8586fbec721"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.420400 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.420432 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jwtw\" (UniqueName: \"kubernetes.io/projected/c65e15df-594b-4292-b784-a8586fbec721-kube-api-access-6jwtw\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.420444 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.420456 4810 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.422926 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "c65e15df-594b-4292-b784-a8586fbec721" (UID: "c65e15df-594b-4292-b784-a8586fbec721"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.522395 4810 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c65e15df-594b-4292-b784-a8586fbec721-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.599129 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"db524b8c-98e9-41bf-be3f-5376226012e4","Type":"ContainerStarted","Data":"ce18e4c87697f56ca6219af97fe72c3079aff2b330c3a7f9d1f190c25a5b8302"} Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.603145 4810 generic.go:334] "Generic (PLEG): container finished" podID="c65e15df-594b-4292-b784-a8586fbec721" containerID="fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11" exitCode=0 Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.603229 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76d8d4696d-45zhd" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.603246 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d8d4696d-45zhd" event={"ID":"c65e15df-594b-4292-b784-a8586fbec721","Type":"ContainerDied","Data":"fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11"} Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.603312 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d8d4696d-45zhd" event={"ID":"c65e15df-594b-4292-b784-a8586fbec721","Type":"ContainerDied","Data":"db5b7c863114ff70f0097f7492ef7bd97dd9dfd9122646bb07b4e27234a4a6d4"} Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.603335 4810 scope.go:117] "RemoveContainer" containerID="1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.614134 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d2130a28-40ad-4938-a265-8114fbcf38a1","Type":"ContainerStarted","Data":"c15d78f7c3b6469b5cf7edb212cb2f9f44d1eaf1a175257f174fb2b1a470a999"} Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.650396 4810 scope.go:117] "RemoveContainer" containerID="fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.667403 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.667296318 podStartE2EDuration="3.667296318s" podCreationTimestamp="2025-12-03 06:00:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:00:53.647229533 +0000 UTC m=+1177.582690374" watchObservedRunningTime="2025-12-03 06:00:53.667296318 +0000 UTC m=+1177.602757159" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.687759 4810 scope.go:117] "RemoveContainer" containerID="1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3" Dec 03 06:00:53 crc kubenswrapper[4810]: E1203 06:00:53.690064 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3\": container with ID starting with 1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3 not found: ID does not exist" containerID="1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.690129 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3"} err="failed to get container status \"1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3\": rpc error: code = NotFound desc = could not find container \"1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3\": container with ID starting with 1b6f083b363a2ea3e92ab6c4861e90ccfce9ef87f49d6000b0916c42df3cd1e3 not found: ID does not exist" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.690163 4810 scope.go:117] "RemoveContainer" containerID="fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11" Dec 03 06:00:53 crc kubenswrapper[4810]: E1203 06:00:53.690653 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11\": container with ID starting with fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11 not found: ID does not exist" containerID="fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.690694 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11"} err="failed to get container status \"fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11\": rpc error: code = NotFound desc = could not find container \"fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11\": container with ID starting with fdb42ac3eea2fa8962e2fbc4338e61d8dbca57935800c68bbafb53543976ba11 not found: ID does not exist" Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.696510 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76d8d4696d-45zhd"] Dec 03 06:00:53 crc kubenswrapper[4810]: I1203 06:00:53.708250 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-76d8d4696d-45zhd"] Dec 03 06:00:54 crc kubenswrapper[4810]: I1203 06:00:54.395472 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c65e15df-594b-4292-b784-a8586fbec721" path="/var/lib/kubelet/pods/c65e15df-594b-4292-b784-a8586fbec721/volumes" Dec 03 06:00:54 crc kubenswrapper[4810]: E1203 06:00:54.856458 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33fa5884_f2c0_4391_a719_81c4d43605dc.slice/crio-67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846\": RecentStats: unable to find data in memory cache]" Dec 03 06:00:55 crc kubenswrapper[4810]: I1203 06:00:55.677343 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:00:55 crc kubenswrapper[4810]: I1203 06:00:55.677430 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:00:55 crc kubenswrapper[4810]: I1203 06:00:55.677491 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:00:55 crc kubenswrapper[4810]: I1203 06:00:55.678197 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"44c04cb46113a276349e0decdfe2671ab188ec4674c9c0e0a836be88642df5e3"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:00:55 crc kubenswrapper[4810]: I1203 06:00:55.678293 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" 
containerID="cri-o://44c04cb46113a276349e0decdfe2671ab188ec4674c9c0e0a836be88642df5e3" gracePeriod=600 Dec 03 06:00:55 crc kubenswrapper[4810]: I1203 06:00:55.926823 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 06:00:56 crc kubenswrapper[4810]: I1203 06:00:56.685170 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="44c04cb46113a276349e0decdfe2671ab188ec4674c9c0e0a836be88642df5e3" exitCode=0 Dec 03 06:00:56 crc kubenswrapper[4810]: I1203 06:00:56.685239 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"44c04cb46113a276349e0decdfe2671ab188ec4674c9c0e0a836be88642df5e3"} Dec 03 06:00:56 crc kubenswrapper[4810]: I1203 06:00:56.685568 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"c2d89f1bb4f8093d6c6c727461ec0b9a6e075b6b1856024e1665f68befa27390"} Dec 03 06:00:56 crc kubenswrapper[4810]: I1203 06:00:56.685601 4810 scope.go:117] "RemoveContainer" containerID="54276ea7f067e034e52d0f67559acc42b63d689d697478d16f1565a902279985" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.139264 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29412361-hn96l"] Dec 03 06:01:00 crc kubenswrapper[4810]: E1203 06:01:00.146583 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c65e15df-594b-4292-b784-a8586fbec721" containerName="neutron-api" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.146695 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c65e15df-594b-4292-b784-a8586fbec721" containerName="neutron-api" Dec 03 06:01:00 crc kubenswrapper[4810]: E1203 06:01:00.146818 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c65e15df-594b-4292-b784-a8586fbec721" containerName="neutron-httpd" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.146889 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c65e15df-594b-4292-b784-a8586fbec721" containerName="neutron-httpd" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.147156 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="c65e15df-594b-4292-b784-a8586fbec721" containerName="neutron-api" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.147248 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="c65e15df-594b-4292-b784-a8586fbec721" containerName="neutron-httpd" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.147977 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.151498 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29412361-hn96l"] Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.260896 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-fernet-keys\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.261711 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-combined-ca-bundle\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.261857 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvbb8\" (UniqueName: \"kubernetes.io/projected/3a8164cf-937e-4f52-a03e-00708ad12ebb-kube-api-access-gvbb8\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.262035 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-config-data\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.364188 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-config-data\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.364269 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-fernet-keys\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.364294 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-combined-ca-bundle\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.364344 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvbb8\" (UniqueName: \"kubernetes.io/projected/3a8164cf-937e-4f52-a03e-00708ad12ebb-kube-api-access-gvbb8\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.371591 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-config-data\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.372130 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-fernet-keys\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.382462 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-combined-ca-bundle\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.393711 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvbb8\" (UniqueName: \"kubernetes.io/projected/3a8164cf-937e-4f52-a03e-00708ad12ebb-kube-api-access-gvbb8\") pod \"keystone-cron-29412361-hn96l\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.476345 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.566550 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-b455b97b9-skz54"] Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.568321 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.571397 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.571583 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.577956 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.597874 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-b455b97b9-skz54"] Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.669512 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9mbv\" (UniqueName: \"kubernetes.io/projected/61ab11f4-c89b-406d-817d-f652951cf71d-kube-api-access-z9mbv\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.669925 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ab11f4-c89b-406d-817d-f652951cf71d-run-httpd\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.670017 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/61ab11f4-c89b-406d-817d-f652951cf71d-etc-swift\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.670111 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-internal-tls-certs\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.670209 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-combined-ca-bundle\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.670281 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-public-tls-certs\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.670761 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ab11f4-c89b-406d-817d-f652951cf71d-log-httpd\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " 
pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.671104 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-config-data\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.773383 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ab11f4-c89b-406d-817d-f652951cf71d-log-httpd\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.774349 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-config-data\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.774403 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9mbv\" (UniqueName: \"kubernetes.io/projected/61ab11f4-c89b-406d-817d-f652951cf71d-kube-api-access-z9mbv\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.774441 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ab11f4-c89b-406d-817d-f652951cf71d-run-httpd\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.774465 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/61ab11f4-c89b-406d-817d-f652951cf71d-etc-swift\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.774497 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-internal-tls-certs\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.774525 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-combined-ca-bundle\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.774546 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-public-tls-certs\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc 
kubenswrapper[4810]: I1203 06:01:00.775448 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ab11f4-c89b-406d-817d-f652951cf71d-log-httpd\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.779082 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ab11f4-c89b-406d-817d-f652951cf71d-run-httpd\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.782097 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/61ab11f4-c89b-406d-817d-f652951cf71d-etc-swift\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.782304 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-combined-ca-bundle\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.799400 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-internal-tls-certs\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.799412 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-public-tls-certs\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.803478 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61ab11f4-c89b-406d-817d-f652951cf71d-config-data\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.805705 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9mbv\" (UniqueName: \"kubernetes.io/projected/61ab11f4-c89b-406d-817d-f652951cf71d-kube-api-access-z9mbv\") pod \"swift-proxy-b455b97b9-skz54\" (UID: \"61ab11f4-c89b-406d-817d-f652951cf71d\") " pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:00 crc kubenswrapper[4810]: I1203 06:01:00.901552 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:01 crc kubenswrapper[4810]: I1203 06:01:01.202259 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 06:01:05 crc kubenswrapper[4810]: E1203 06:01:05.117964 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33fa5884_f2c0_4391_a719_81c4d43605dc.slice/crio-67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846\": RecentStats: unable to find data in memory cache]" Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.181784 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.182144 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="ceilometer-central-agent" containerID="cri-o://20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff" gracePeriod=30 Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.182201 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="proxy-httpd" containerID="cri-o://8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307" gracePeriod=30 Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.182290 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="ceilometer-notification-agent" containerID="cri-o://de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8" gracePeriod=30 Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.182798 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="sg-core" containerID="cri-o://635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb" gracePeriod=30 Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.187551 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.921082 4810 generic.go:334] "Generic (PLEG): container finished" podID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerID="8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307" exitCode=0 Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.921824 4810 generic.go:334] "Generic (PLEG): container finished" podID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerID="635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb" exitCode=2 Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.921833 4810 generic.go:334] "Generic (PLEG): container finished" podID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerID="20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff" exitCode=0 Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.921858 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerDied","Data":"8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307"} Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.921896 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerDied","Data":"635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb"} Dec 03 06:01:05 crc kubenswrapper[4810]: I1203 06:01:05.921907 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerDied","Data":"20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff"} Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.328757 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29412361-hn96l"] Dec 03 06:01:06 crc kubenswrapper[4810]: W1203 06:01:06.331546 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a8164cf_937e_4f52_a03e_00708ad12ebb.slice/crio-e3e172bf7b1b124bf92cf9377e7df5e8177e05bdbfd225cf76f940dedfb39e4c WatchSource:0}: Error finding container e3e172bf7b1b124bf92cf9377e7df5e8177e05bdbfd225cf76f940dedfb39e4c: Status 404 returned error can't find the container with id e3e172bf7b1b124bf92cf9377e7df5e8177e05bdbfd225cf76f940dedfb39e4c Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.524632 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-b455b97b9-skz54"] Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.764305 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.837101 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-scripts\") pod \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.837157 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h847w\" (UniqueName: \"kubernetes.io/projected/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-kube-api-access-h847w\") pod \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.837250 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-run-httpd\") pod \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.837420 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-log-httpd\") pod \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.837526 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-config-data\") pod \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.837547 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-sg-core-conf-yaml\") pod 
\"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.837632 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-combined-ca-bundle\") pod \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\" (UID: \"6f87de7c-1ed9-49a9-b8d6-a34e78912f08\") " Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.839308 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6f87de7c-1ed9-49a9-b8d6-a34e78912f08" (UID: "6f87de7c-1ed9-49a9-b8d6-a34e78912f08"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.840866 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6f87de7c-1ed9-49a9-b8d6-a34e78912f08" (UID: "6f87de7c-1ed9-49a9-b8d6-a34e78912f08"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.844919 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-scripts" (OuterVolumeSpecName: "scripts") pod "6f87de7c-1ed9-49a9-b8d6-a34e78912f08" (UID: "6f87de7c-1ed9-49a9-b8d6-a34e78912f08"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.846121 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-kube-api-access-h847w" (OuterVolumeSpecName: "kube-api-access-h847w") pod "6f87de7c-1ed9-49a9-b8d6-a34e78912f08" (UID: "6f87de7c-1ed9-49a9-b8d6-a34e78912f08"). InnerVolumeSpecName "kube-api-access-h847w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.885715 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6f87de7c-1ed9-49a9-b8d6-a34e78912f08" (UID: "6f87de7c-1ed9-49a9-b8d6-a34e78912f08"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.933891 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"db524b8c-98e9-41bf-be3f-5376226012e4","Type":"ContainerStarted","Data":"8b53e36a7d3f6431c832f3c8004c0ab7030d2df31fb51270308f0fddf3ea155b"} Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.940884 4810 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.940918 4810 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.940932 4810 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.940945 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.940957 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h847w\" (UniqueName: \"kubernetes.io/projected/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-kube-api-access-h847w\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.948998 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f87de7c-1ed9-49a9-b8d6-a34e78912f08" (UID: "6f87de7c-1ed9-49a9-b8d6-a34e78912f08"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.951140 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.886399214 podStartE2EDuration="15.95112102s" podCreationTimestamp="2025-12-03 06:00:51 +0000 UTC" firstStartedPulling="2025-12-03 06:00:52.743067259 +0000 UTC m=+1176.678528100" lastFinishedPulling="2025-12-03 06:01:05.807789065 +0000 UTC m=+1189.743249906" observedRunningTime="2025-12-03 06:01:06.948902522 +0000 UTC m=+1190.884363363" watchObservedRunningTime="2025-12-03 06:01:06.95112102 +0000 UTC m=+1190.886581861" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.955425 4810 generic.go:334] "Generic (PLEG): container finished" podID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerID="de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8" exitCode=0 Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.955787 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerDied","Data":"de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8"} Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.955823 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f87de7c-1ed9-49a9-b8d6-a34e78912f08","Type":"ContainerDied","Data":"82d3d5d55a4b8a5c45b501bf19cb16da833880fe4b64b45444735501d396b3e7"} Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.955844 4810 scope.go:117] "RemoveContainer" containerID="8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.956046 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.965668 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412361-hn96l" event={"ID":"3a8164cf-937e-4f52-a03e-00708ad12ebb","Type":"ContainerStarted","Data":"641a85256f5b7cd00cac2b91340a42b6ec74d8024e8e7c269cebceeaf8fcf169"} Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.965724 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412361-hn96l" event={"ID":"3a8164cf-937e-4f52-a03e-00708ad12ebb","Type":"ContainerStarted","Data":"e3e172bf7b1b124bf92cf9377e7df5e8177e05bdbfd225cf76f940dedfb39e4c"} Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.967886 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-b455b97b9-skz54" event={"ID":"61ab11f4-c89b-406d-817d-f652951cf71d","Type":"ContainerStarted","Data":"407eeecda3e7c40879f2302b41a5775e3960223e985a39ee0b02f48effd8b75d"} Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.986911 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29412361-hn96l" podStartSLOduration=6.9868892460000005 podStartE2EDuration="6.986889246s" podCreationTimestamp="2025-12-03 06:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:06.984197956 +0000 UTC m=+1190.919658797" watchObservedRunningTime="2025-12-03 06:01:06.986889246 +0000 UTC m=+1190.922350087" Dec 03 06:01:06 crc kubenswrapper[4810]: I1203 06:01:06.998919 4810 scope.go:117] "RemoveContainer" containerID="635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.034571 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-config-data" (OuterVolumeSpecName: "config-data") pod "6f87de7c-1ed9-49a9-b8d6-a34e78912f08" (UID: "6f87de7c-1ed9-49a9-b8d6-a34e78912f08"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.034633 4810 scope.go:117] "RemoveContainer" containerID="de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.042555 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.042637 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f87de7c-1ed9-49a9-b8d6-a34e78912f08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.068165 4810 scope.go:117] "RemoveContainer" containerID="20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.106755 4810 scope.go:117] "RemoveContainer" containerID="8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307" Dec 03 06:01:07 crc kubenswrapper[4810]: E1203 06:01:07.107173 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307\": container with ID starting with 8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307 not found: ID does not exist" containerID="8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.107241 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307"} err="failed to get container status \"8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307\": rpc error: code = NotFound desc = could not find container \"8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307\": container with ID starting with 8bcd87467232c5efef390552afdb57788bb23e1c279bb3db0c4b5f3b4d971307 not found: ID does not exist" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.107292 4810 scope.go:117] "RemoveContainer" containerID="635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb" Dec 03 06:01:07 crc kubenswrapper[4810]: E1203 06:01:07.107655 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb\": container with ID starting with 635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb not found: ID does not exist" containerID="635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.107694 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb"} err="failed to get container status \"635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb\": rpc error: code = NotFound desc = could not find container \"635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb\": container with ID starting with 635941f4bdcc2dc2657acadc43abecdaa1881cea9911b5040fdc8dda21bde9bb not found: ID does not exist" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.107731 4810 scope.go:117] "RemoveContainer" 
containerID="de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8" Dec 03 06:01:07 crc kubenswrapper[4810]: E1203 06:01:07.107938 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8\": container with ID starting with de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8 not found: ID does not exist" containerID="de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.108041 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8"} err="failed to get container status \"de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8\": rpc error: code = NotFound desc = could not find container \"de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8\": container with ID starting with de2dd3796709e229e37c0764a39a9bf4f7e7ceef6ed4b16e01dee75b43bc04c8 not found: ID does not exist" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.108122 4810 scope.go:117] "RemoveContainer" containerID="20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff" Dec 03 06:01:07 crc kubenswrapper[4810]: E1203 06:01:07.108651 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff\": container with ID starting with 20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff not found: ID does not exist" containerID="20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.108684 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff"} err="failed to get container status \"20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff\": rpc error: code = NotFound desc = could not find container \"20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff\": container with ID starting with 20e2e842069b6ca7aff215726b627b98b7bbc179ad13260582792eee1293afff not found: ID does not exist" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.292144 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.309153 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.322156 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:07 crc kubenswrapper[4810]: E1203 06:01:07.322883 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="sg-core" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.322995 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="sg-core" Dec 03 06:01:07 crc kubenswrapper[4810]: E1203 06:01:07.323063 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="proxy-httpd" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.323121 4810 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="proxy-httpd" Dec 03 06:01:07 crc kubenswrapper[4810]: E1203 06:01:07.323181 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="ceilometer-notification-agent" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.323238 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="ceilometer-notification-agent" Dec 03 06:01:07 crc kubenswrapper[4810]: E1203 06:01:07.323307 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="ceilometer-central-agent" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.323358 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="ceilometer-central-agent" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.323591 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="proxy-httpd" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.323660 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="ceilometer-central-agent" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.323713 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="ceilometer-notification-agent" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.323811 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" containerName="sg-core" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.325625 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.329346 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.331328 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.349367 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.450641 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.450712 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-log-httpd\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.450820 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-run-httpd\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.451063 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bc87\" (UniqueName: \"kubernetes.io/projected/2c726589-6410-4893-bf46-f0585c3da8a2-kube-api-access-2bc87\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.451272 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.451441 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-scripts\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.451515 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-config-data\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.553993 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-scripts\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.554049 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-config-data\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.554090 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.554125 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-log-httpd\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.554222 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-run-httpd\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.554258 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bc87\" (UniqueName: \"kubernetes.io/projected/2c726589-6410-4893-bf46-f0585c3da8a2-kube-api-access-2bc87\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.554308 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.554886 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-log-httpd\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.555184 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-run-httpd\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.558411 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.558648 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-config-data\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.559676 4810 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-scripts\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.559820 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.574537 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bc87\" (UniqueName: \"kubernetes.io/projected/2c726589-6410-4893-bf46-f0585c3da8a2-kube-api-access-2bc87\") pod \"ceilometer-0\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.679485 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.990266 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-b455b97b9-skz54" event={"ID":"61ab11f4-c89b-406d-817d-f652951cf71d","Type":"ContainerStarted","Data":"8fdd8eab7a14254d25b7ec887502a0634358721bbf4a8b2fc1cc2391c2c6427d"} Dec 03 06:01:07 crc kubenswrapper[4810]: I1203 06:01:07.990784 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-b455b97b9-skz54" event={"ID":"61ab11f4-c89b-406d-817d-f652951cf71d","Type":"ContainerStarted","Data":"96fc18b8869c1ddba239734d46998911f7cc6ecb474c35f841782799848f58d1"} Dec 03 06:01:08 crc kubenswrapper[4810]: I1203 06:01:08.260628 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:08 crc kubenswrapper[4810]: I1203 06:01:08.392057 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f87de7c-1ed9-49a9-b8d6-a34e78912f08" path="/var/lib/kubelet/pods/6f87de7c-1ed9-49a9-b8d6-a34e78912f08/volumes" Dec 03 06:01:09 crc kubenswrapper[4810]: I1203 06:01:09.025937 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerStarted","Data":"379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6"} Dec 03 06:01:09 crc kubenswrapper[4810]: I1203 06:01:09.026027 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerStarted","Data":"fe1f9edd5bcfe50ee5af1f92b5addb856b0bf928bc7393a8919d461b6da1ed69"} Dec 03 06:01:09 crc kubenswrapper[4810]: I1203 06:01:09.026081 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:09 crc kubenswrapper[4810]: I1203 06:01:09.026123 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:09 crc kubenswrapper[4810]: I1203 06:01:09.056759 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-b455b97b9-skz54" podStartSLOduration=9.056704929 podStartE2EDuration="9.056704929s" podCreationTimestamp="2025-12-03 06:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:09.048191897 +0000 UTC 
m=+1192.983652738" watchObservedRunningTime="2025-12-03 06:01:09.056704929 +0000 UTC m=+1192.992165770" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.036674 4810 generic.go:334] "Generic (PLEG): container finished" podID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerID="dbcbfee7463650f67ef1f74a2f1be71afdc5dbd9d179e0693c73eecf49694baa" exitCode=137 Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.036785 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1931421c-a4c1-47fd-b4a0-dfec6d803de9","Type":"ContainerDied","Data":"dbcbfee7463650f67ef1f74a2f1be71afdc5dbd9d179e0693c73eecf49694baa"} Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.037204 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1931421c-a4c1-47fd-b4a0-dfec6d803de9","Type":"ContainerDied","Data":"6da0eda216e8ff522ec068cc38361b62149d2433856b5f5284348e64427a5dad"} Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.037236 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6da0eda216e8ff522ec068cc38361b62149d2433856b5f5284348e64427a5dad" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.038666 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.042932 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerStarted","Data":"aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b"} Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.042982 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerStarted","Data":"4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248"} Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.047308 4810 generic.go:334] "Generic (PLEG): container finished" podID="3a8164cf-937e-4f52-a03e-00708ad12ebb" containerID="641a85256f5b7cd00cac2b91340a42b6ec74d8024e8e7c269cebceeaf8fcf169" exitCode=0 Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.047410 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412361-hn96l" event={"ID":"3a8164cf-937e-4f52-a03e-00708ad12ebb","Type":"ContainerDied","Data":"641a85256f5b7cd00cac2b91340a42b6ec74d8024e8e7c269cebceeaf8fcf169"} Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.117583 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1931421c-a4c1-47fd-b4a0-dfec6d803de9-logs\") pod \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.118161 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-scripts\") pod \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.118237 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data\") pod \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " Dec 03 06:01:10 crc 
kubenswrapper[4810]: I1203 06:01:10.118294 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gsxf\" (UniqueName: \"kubernetes.io/projected/1931421c-a4c1-47fd-b4a0-dfec6d803de9-kube-api-access-9gsxf\") pod \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.118313 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1931421c-a4c1-47fd-b4a0-dfec6d803de9-etc-machine-id\") pod \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.118357 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data-custom\") pod \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.118386 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-combined-ca-bundle\") pod \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\" (UID: \"1931421c-a4c1-47fd-b4a0-dfec6d803de9\") " Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.118401 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1931421c-a4c1-47fd-b4a0-dfec6d803de9-logs" (OuterVolumeSpecName: "logs") pod "1931421c-a4c1-47fd-b4a0-dfec6d803de9" (UID: "1931421c-a4c1-47fd-b4a0-dfec6d803de9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.118925 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1931421c-a4c1-47fd-b4a0-dfec6d803de9-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.119664 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1931421c-a4c1-47fd-b4a0-dfec6d803de9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1931421c-a4c1-47fd-b4a0-dfec6d803de9" (UID: "1931421c-a4c1-47fd-b4a0-dfec6d803de9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.135040 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1931421c-a4c1-47fd-b4a0-dfec6d803de9-kube-api-access-9gsxf" (OuterVolumeSpecName: "kube-api-access-9gsxf") pod "1931421c-a4c1-47fd-b4a0-dfec6d803de9" (UID: "1931421c-a4c1-47fd-b4a0-dfec6d803de9"). InnerVolumeSpecName "kube-api-access-9gsxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.136827 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.138210 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-scripts" (OuterVolumeSpecName: "scripts") pod "1931421c-a4c1-47fd-b4a0-dfec6d803de9" (UID: "1931421c-a4c1-47fd-b4a0-dfec6d803de9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.138411 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1931421c-a4c1-47fd-b4a0-dfec6d803de9" (UID: "1931421c-a4c1-47fd-b4a0-dfec6d803de9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.164019 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1931421c-a4c1-47fd-b4a0-dfec6d803de9" (UID: "1931421c-a4c1-47fd-b4a0-dfec6d803de9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.221350 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.221401 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gsxf\" (UniqueName: \"kubernetes.io/projected/1931421c-a4c1-47fd-b4a0-dfec6d803de9-kube-api-access-9gsxf\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.221413 4810 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1931421c-a4c1-47fd-b4a0-dfec6d803de9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.221422 4810 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.221432 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.225053 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data" (OuterVolumeSpecName: "config-data") pod "1931421c-a4c1-47fd-b4a0-dfec6d803de9" (UID: "1931421c-a4c1-47fd-b4a0-dfec6d803de9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.324159 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1931421c-a4c1-47fd-b4a0-dfec6d803de9-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.450663 4810 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod5c435fcb-cba8-43e8-b6ea-f273647f0264"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod5c435fcb-cba8-43e8-b6ea-f273647f0264] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5c435fcb_cba8_43e8_b6ea_f273647f0264.slice" Dec 03 06:01:10 crc kubenswrapper[4810]: E1203 06:01:10.450764 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod5c435fcb-cba8-43e8-b6ea-f273647f0264] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod5c435fcb-cba8-43e8-b6ea-f273647f0264] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5c435fcb_cba8_43e8_b6ea_f273647f0264.slice" pod="openstack/barbican-api-6994c56bc4-rkwdw" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.465641 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.466511 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerName="glance-log" containerID="cri-o://7387ca5156bc4b58fbce2b30e2c7926eb0216ddca329561eb212067957565b08" gracePeriod=30 Dec 03 06:01:10 crc kubenswrapper[4810]: I1203 06:01:10.466700 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerName="glance-httpd" containerID="cri-o://3d23e0239bf0b68767a533f36795e571d2ac50c11a04b4954b8f17883a84b3f4" gracePeriod=30 Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.058407 4810 generic.go:334] "Generic (PLEG): container finished" podID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerID="7387ca5156bc4b58fbce2b30e2c7926eb0216ddca329561eb212067957565b08" exitCode=143 Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.058511 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23b10432-e146-4a0a-a0d7-793e0dae69a2","Type":"ContainerDied","Data":"7387ca5156bc4b58fbce2b30e2c7926eb0216ddca329561eb212067957565b08"} Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.058933 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.059002 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6994c56bc4-rkwdw" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.207458 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6994c56bc4-rkwdw"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.237684 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6994c56bc4-rkwdw"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.247556 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.268525 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.294779 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:01:11 crc kubenswrapper[4810]: E1203 06:01:11.295448 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerName="cinder-api-log" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.295470 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerName="cinder-api-log" Dec 03 06:01:11 crc kubenswrapper[4810]: E1203 06:01:11.295515 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerName="cinder-api" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.295523 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerName="cinder-api" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.297555 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerName="cinder-api-log" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.297587 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" containerName="cinder-api" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.300468 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.303181 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.303600 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.304454 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.313573 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.354772 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.354812 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-scripts\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.354838 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-config-data-custom\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.354864 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rfkr\" (UniqueName: \"kubernetes.io/projected/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-kube-api-access-2rfkr\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.354885 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-logs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.354909 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.354981 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-config-data\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.355009 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.355066 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.423189 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-dx9c7"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.424585 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.452545 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-dx9c7"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.456886 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-config-data\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.456948 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457031 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzl7j\" (UniqueName: \"kubernetes.io/projected/266d33c8-6cee-4e1c-aec4-e2f291b185e8-kube-api-access-fzl7j\") pod \"nova-api-db-create-dx9c7\" (UID: \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\") " pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457059 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457104 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457125 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-scripts\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457146 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-config-data-custom\") pod \"cinder-api-0\" (UID: 
\"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457172 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rfkr\" (UniqueName: \"kubernetes.io/projected/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-kube-api-access-2rfkr\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457197 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-logs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457219 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.457267 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/266d33c8-6cee-4e1c-aec4-e2f291b185e8-operator-scripts\") pod \"nova-api-db-create-dx9c7\" (UID: \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\") " pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.463480 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-scripts\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.464675 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-config-data\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.467464 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.470614 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-logs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.471045 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.471793 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-config-data-custom\") pod \"cinder-api-0\" (UID: 
\"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.475537 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.480249 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.497574 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rfkr\" (UniqueName: \"kubernetes.io/projected/3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca-kube-api-access-2rfkr\") pod \"cinder-api-0\" (UID: \"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca\") " pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.505009 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.532804 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-c965t"] Dec 03 06:01:11 crc kubenswrapper[4810]: E1203 06:01:11.533323 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a8164cf-937e-4f52-a03e-00708ad12ebb" containerName="keystone-cron" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.533341 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a8164cf-937e-4f52-a03e-00708ad12ebb" containerName="keystone-cron" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.533516 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a8164cf-937e-4f52-a03e-00708ad12ebb" containerName="keystone-cron" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.534243 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.539682 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-c965t"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.559717 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-fernet-keys\") pod \"3a8164cf-937e-4f52-a03e-00708ad12ebb\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.560030 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-config-data\") pod \"3a8164cf-937e-4f52-a03e-00708ad12ebb\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.560133 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvbb8\" (UniqueName: \"kubernetes.io/projected/3a8164cf-937e-4f52-a03e-00708ad12ebb-kube-api-access-gvbb8\") pod \"3a8164cf-937e-4f52-a03e-00708ad12ebb\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.560395 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-combined-ca-bundle\") pod \"3a8164cf-937e-4f52-a03e-00708ad12ebb\" (UID: \"3a8164cf-937e-4f52-a03e-00708ad12ebb\") " Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.560793 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzl7j\" (UniqueName: \"kubernetes.io/projected/266d33c8-6cee-4e1c-aec4-e2f291b185e8-kube-api-access-fzl7j\") pod \"nova-api-db-create-dx9c7\" (UID: \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\") " pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.560937 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52d9a37a-b91f-455c-9f2a-c0b60832ff78-operator-scripts\") pod \"nova-cell0-db-create-c965t\" (UID: \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\") " pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.561035 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwrt4\" (UniqueName: \"kubernetes.io/projected/52d9a37a-b91f-455c-9f2a-c0b60832ff78-kube-api-access-wwrt4\") pod \"nova-cell0-db-create-c965t\" (UID: \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\") " pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.561119 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/266d33c8-6cee-4e1c-aec4-e2f291b185e8-operator-scripts\") pod \"nova-api-db-create-dx9c7\" (UID: \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\") " pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.562095 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/266d33c8-6cee-4e1c-aec4-e2f291b185e8-operator-scripts\") pod \"nova-api-db-create-dx9c7\" (UID: 
\"266d33c8-6cee-4e1c-aec4-e2f291b185e8\") " pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.566084 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3a8164cf-937e-4f52-a03e-00708ad12ebb" (UID: "3a8164cf-937e-4f52-a03e-00708ad12ebb"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.569035 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-54f5-account-create-update-79mzp"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.570549 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.570951 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a8164cf-937e-4f52-a03e-00708ad12ebb-kube-api-access-gvbb8" (OuterVolumeSpecName: "kube-api-access-gvbb8") pod "3a8164cf-937e-4f52-a03e-00708ad12ebb" (UID: "3a8164cf-937e-4f52-a03e-00708ad12ebb"). InnerVolumeSpecName "kube-api-access-gvbb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.575138 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.598577 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-54f5-account-create-update-79mzp"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.601841 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzl7j\" (UniqueName: \"kubernetes.io/projected/266d33c8-6cee-4e1c-aec4-e2f291b185e8-kube-api-access-fzl7j\") pod \"nova-api-db-create-dx9c7\" (UID: \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\") " pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.647246 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.668238 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52d9a37a-b91f-455c-9f2a-c0b60832ff78-operator-scripts\") pod \"nova-cell0-db-create-c965t\" (UID: \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\") " pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.668326 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwrt4\" (UniqueName: \"kubernetes.io/projected/52d9a37a-b91f-455c-9f2a-c0b60832ff78-kube-api-access-wwrt4\") pod \"nova-cell0-db-create-c965t\" (UID: \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\") " pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.668352 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh87t\" (UniqueName: \"kubernetes.io/projected/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-kube-api-access-wh87t\") pod \"nova-api-54f5-account-create-update-79mzp\" (UID: \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\") " pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.668434 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-operator-scripts\") pod \"nova-api-54f5-account-create-update-79mzp\" (UID: \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\") " pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.668684 4810 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.668700 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvbb8\" (UniqueName: \"kubernetes.io/projected/3a8164cf-937e-4f52-a03e-00708ad12ebb-kube-api-access-gvbb8\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.669106 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52d9a37a-b91f-455c-9f2a-c0b60832ff78-operator-scripts\") pod \"nova-cell0-db-create-c965t\" (UID: \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\") " pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.689920 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwrt4\" (UniqueName: \"kubernetes.io/projected/52d9a37a-b91f-455c-9f2a-c0b60832ff78-kube-api-access-wwrt4\") pod \"nova-cell0-db-create-c965t\" (UID: \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\") " pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.699046 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a8164cf-937e-4f52-a03e-00708ad12ebb" (UID: "3a8164cf-937e-4f52-a03e-00708ad12ebb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.701526 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-config-data" (OuterVolumeSpecName: "config-data") pod "3a8164cf-937e-4f52-a03e-00708ad12ebb" (UID: "3a8164cf-937e-4f52-a03e-00708ad12ebb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.721992 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-5hwv5"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.723627 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.745658 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-b0f1-account-create-update-kjdzz"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.747941 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.751326 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.764465 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5hwv5"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.770269 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh87t\" (UniqueName: \"kubernetes.io/projected/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-kube-api-access-wh87t\") pod \"nova-api-54f5-account-create-update-79mzp\" (UID: \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\") " pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.770323 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-operator-scripts\") pod \"nova-api-54f5-account-create-update-79mzp\" (UID: \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\") " pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.770374 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2092357-d70b-4bc0-ac9e-3660aff8d920-operator-scripts\") pod \"nova-cell1-db-create-5hwv5\" (UID: \"f2092357-d70b-4bc0-ac9e-3660aff8d920\") " pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.770416 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lccxq\" (UniqueName: \"kubernetes.io/projected/7e220d06-666c-4087-8cb7-89a996c55933-kube-api-access-lccxq\") pod \"nova-cell0-b0f1-account-create-update-kjdzz\" (UID: \"7e220d06-666c-4087-8cb7-89a996c55933\") " pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.770457 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45mxd\" (UniqueName: \"kubernetes.io/projected/f2092357-d70b-4bc0-ac9e-3660aff8d920-kube-api-access-45mxd\") pod 
\"nova-cell1-db-create-5hwv5\" (UID: \"f2092357-d70b-4bc0-ac9e-3660aff8d920\") " pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.770488 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e220d06-666c-4087-8cb7-89a996c55933-operator-scripts\") pod \"nova-cell0-b0f1-account-create-update-kjdzz\" (UID: \"7e220d06-666c-4087-8cb7-89a996c55933\") " pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.770532 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.770543 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a8164cf-937e-4f52-a03e-00708ad12ebb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.771426 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-operator-scripts\") pod \"nova-api-54f5-account-create-update-79mzp\" (UID: \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\") " pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.786627 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b0f1-account-create-update-kjdzz"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.787954 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh87t\" (UniqueName: \"kubernetes.io/projected/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-kube-api-access-wh87t\") pod \"nova-api-54f5-account-create-update-79mzp\" (UID: \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\") " pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.828196 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.854712 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.872033 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45mxd\" (UniqueName: \"kubernetes.io/projected/f2092357-d70b-4bc0-ac9e-3660aff8d920-kube-api-access-45mxd\") pod \"nova-cell1-db-create-5hwv5\" (UID: \"f2092357-d70b-4bc0-ac9e-3660aff8d920\") " pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.872108 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e220d06-666c-4087-8cb7-89a996c55933-operator-scripts\") pod \"nova-cell0-b0f1-account-create-update-kjdzz\" (UID: \"7e220d06-666c-4087-8cb7-89a996c55933\") " pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.872227 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2092357-d70b-4bc0-ac9e-3660aff8d920-operator-scripts\") pod \"nova-cell1-db-create-5hwv5\" (UID: \"f2092357-d70b-4bc0-ac9e-3660aff8d920\") " pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.872272 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lccxq\" (UniqueName: \"kubernetes.io/projected/7e220d06-666c-4087-8cb7-89a996c55933-kube-api-access-lccxq\") pod \"nova-cell0-b0f1-account-create-update-kjdzz\" (UID: \"7e220d06-666c-4087-8cb7-89a996c55933\") " pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.873782 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2092357-d70b-4bc0-ac9e-3660aff8d920-operator-scripts\") pod \"nova-cell1-db-create-5hwv5\" (UID: \"f2092357-d70b-4bc0-ac9e-3660aff8d920\") " pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.875278 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e220d06-666c-4087-8cb7-89a996c55933-operator-scripts\") pod \"nova-cell0-b0f1-account-create-update-kjdzz\" (UID: \"7e220d06-666c-4087-8cb7-89a996c55933\") " pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.909519 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45mxd\" (UniqueName: \"kubernetes.io/projected/f2092357-d70b-4bc0-ac9e-3660aff8d920-kube-api-access-45mxd\") pod \"nova-cell1-db-create-5hwv5\" (UID: \"f2092357-d70b-4bc0-ac9e-3660aff8d920\") " pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.915627 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.924226 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lccxq\" (UniqueName: \"kubernetes.io/projected/7e220d06-666c-4087-8cb7-89a996c55933-kube-api-access-lccxq\") pod \"nova-cell0-b0f1-account-create-update-kjdzz\" (UID: \"7e220d06-666c-4087-8cb7-89a996c55933\") " pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.962485 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-57b4-account-create-update-qbvs6"] Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.964421 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.972291 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 03 06:01:11 crc kubenswrapper[4810]: I1203 06:01:11.978175 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-57b4-account-create-update-qbvs6"] Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.074768 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826ba06a-b58a-4700-a8ff-f6512e825a23-operator-scripts\") pod \"nova-cell1-57b4-account-create-update-qbvs6\" (UID: \"826ba06a-b58a-4700-a8ff-f6512e825a23\") " pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.074878 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lffpr\" (UniqueName: \"kubernetes.io/projected/826ba06a-b58a-4700-a8ff-f6512e825a23-kube-api-access-lffpr\") pod \"nova-cell1-57b4-account-create-update-qbvs6\" (UID: \"826ba06a-b58a-4700-a8ff-f6512e825a23\") " pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.125008 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerStarted","Data":"7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a"} Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.125783 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="ceilometer-central-agent" containerID="cri-o://379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6" gracePeriod=30 Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.126186 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.126530 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="proxy-httpd" containerID="cri-o://7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a" gracePeriod=30 Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.126587 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="sg-core" 
containerID="cri-o://aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b" gracePeriod=30 Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.126640 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="ceilometer-notification-agent" containerID="cri-o://4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248" gracePeriod=30 Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.169177 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.175184 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412361-hn96l" event={"ID":"3a8164cf-937e-4f52-a03e-00708ad12ebb","Type":"ContainerDied","Data":"e3e172bf7b1b124bf92cf9377e7df5e8177e05bdbfd225cf76f940dedfb39e4c"} Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.175230 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3e172bf7b1b124bf92cf9377e7df5e8177e05bdbfd225cf76f940dedfb39e4c" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.175324 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412361-hn96l" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.176531 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lffpr\" (UniqueName: \"kubernetes.io/projected/826ba06a-b58a-4700-a8ff-f6512e825a23-kube-api-access-lffpr\") pod \"nova-cell1-57b4-account-create-update-qbvs6\" (UID: \"826ba06a-b58a-4700-a8ff-f6512e825a23\") " pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.176705 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826ba06a-b58a-4700-a8ff-f6512e825a23-operator-scripts\") pod \"nova-cell1-57b4-account-create-update-qbvs6\" (UID: \"826ba06a-b58a-4700-a8ff-f6512e825a23\") " pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.177975 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.178914 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826ba06a-b58a-4700-a8ff-f6512e825a23-operator-scripts\") pod \"nova-cell1-57b4-account-create-update-qbvs6\" (UID: \"826ba06a-b58a-4700-a8ff-f6512e825a23\") " pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.213246 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lffpr\" (UniqueName: \"kubernetes.io/projected/826ba06a-b58a-4700-a8ff-f6512e825a23-kube-api-access-lffpr\") pod \"nova-cell1-57b4-account-create-update-qbvs6\" (UID: \"826ba06a-b58a-4700-a8ff-f6512e825a23\") " pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.229785 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.5014475210000002 podStartE2EDuration="5.229759729s" podCreationTimestamp="2025-12-03 06:01:07 +0000 UTC" firstStartedPulling="2025-12-03 06:01:08.271764915 +0000 UTC m=+1192.207225756" lastFinishedPulling="2025-12-03 06:01:11.000077113 +0000 UTC m=+1194.935537964" observedRunningTime="2025-12-03 06:01:12.212372254 +0000 UTC m=+1196.147833095" watchObservedRunningTime="2025-12-03 06:01:12.229759729 +0000 UTC m=+1196.165220560" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.299834 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.312616 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.463302 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1931421c-a4c1-47fd-b4a0-dfec6d803de9" path="/var/lib/kubelet/pods/1931421c-a4c1-47fd-b4a0-dfec6d803de9/volumes" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.466449 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c435fcb-cba8-43e8-b6ea-f273647f0264" path="/var/lib/kubelet/pods/5c435fcb-cba8-43e8-b6ea-f273647f0264/volumes" Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.842179 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-54f5-account-create-update-79mzp"] Dec 03 06:01:12 crc kubenswrapper[4810]: W1203 06:01:12.853892 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea042a9c_a4d6_4a64_9954_7a5b9f197ea3.slice/crio-d1b2d3492c2fa2722ab1e75e7ab7c083a3fe93da79626c228b074fcdc8a5828e WatchSource:0}: Error finding container d1b2d3492c2fa2722ab1e75e7ab7c083a3fe93da79626c228b074fcdc8a5828e: Status 404 returned error can't find the container with id d1b2d3492c2fa2722ab1e75e7ab7c083a3fe93da79626c228b074fcdc8a5828e Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.868429 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-dx9c7"] Dec 03 06:01:12 crc kubenswrapper[4810]: I1203 06:01:12.875370 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-c965t"] Dec 03 06:01:12 crc kubenswrapper[4810]: W1203 06:01:12.878876 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52d9a37a_b91f_455c_9f2a_c0b60832ff78.slice/crio-143519bbc09bf62966f7fedeb2de636cb4a6eb9e5b98da85f3aaaca89b3075d4 WatchSource:0}: Error finding container 143519bbc09bf62966f7fedeb2de636cb4a6eb9e5b98da85f3aaaca89b3075d4: Status 404 returned error can't find the container with id 143519bbc09bf62966f7fedeb2de636cb4a6eb9e5b98da85f3aaaca89b3075d4 Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.103282 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-57b4-account-create-update-qbvs6"] Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.112890 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b0f1-account-create-update-kjdzz"] Dec 03 06:01:13 crc kubenswrapper[4810]: W1203 06:01:13.118169 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e220d06_666c_4087_8cb7_89a996c55933.slice/crio-7a9995b9598f081e29024d5aee9e3c938ea22621d9dc1a7bc961e85c5ce10b71 WatchSource:0}: Error finding container 7a9995b9598f081e29024d5aee9e3c938ea22621d9dc1a7bc961e85c5ce10b71: Status 404 returned error can't find the container with id 7a9995b9598f081e29024d5aee9e3c938ea22621d9dc1a7bc961e85c5ce10b71 Dec 03 06:01:13 crc kubenswrapper[4810]: W1203 06:01:13.120251 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2092357_d70b_4bc0_ac9e_3660aff8d920.slice/crio-b71222d7e712ef1fbe6a77f7a777acf6ef0ba7e144374e834b74a40b3df20e32 WatchSource:0}: Error finding container b71222d7e712ef1fbe6a77f7a777acf6ef0ba7e144374e834b74a40b3df20e32: Status 404 returned error can't find the container with id 
b71222d7e712ef1fbe6a77f7a777acf6ef0ba7e144374e834b74a40b3df20e32 Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.121489 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5hwv5"] Dec 03 06:01:13 crc kubenswrapper[4810]: W1203 06:01:13.122799 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod826ba06a_b58a_4700_a8ff_f6512e825a23.slice/crio-5c525d4eb348599f455baf2f00f16062e25dd8c7634170bed7adceefeefb4608 WatchSource:0}: Error finding container 5c525d4eb348599f455baf2f00f16062e25dd8c7634170bed7adceefeefb4608: Status 404 returned error can't find the container with id 5c525d4eb348599f455baf2f00f16062e25dd8c7634170bed7adceefeefb4608 Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.198664 4810 generic.go:334] "Generic (PLEG): container finished" podID="2c726589-6410-4893-bf46-f0585c3da8a2" containerID="7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a" exitCode=0 Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.198712 4810 generic.go:334] "Generic (PLEG): container finished" podID="2c726589-6410-4893-bf46-f0585c3da8a2" containerID="aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b" exitCode=2 Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.198722 4810 generic.go:334] "Generic (PLEG): container finished" podID="2c726589-6410-4893-bf46-f0585c3da8a2" containerID="4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248" exitCode=0 Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.198807 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerDied","Data":"7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.198841 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerDied","Data":"aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.198866 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerDied","Data":"4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.201654 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-54f5-account-create-update-79mzp" event={"ID":"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3","Type":"ContainerStarted","Data":"b65cbb71ee610603bc16586f32dcad3af45a61894ce5c0412af1393e8c6c8374"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.201975 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-54f5-account-create-update-79mzp" event={"ID":"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3","Type":"ContainerStarted","Data":"d1b2d3492c2fa2722ab1e75e7ab7c083a3fe93da79626c228b074fcdc8a5828e"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.208887 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5hwv5" event={"ID":"f2092357-d70b-4bc0-ac9e-3660aff8d920","Type":"ContainerStarted","Data":"b71222d7e712ef1fbe6a77f7a777acf6ef0ba7e144374e834b74a40b3df20e32"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.211689 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" event={"ID":"826ba06a-b58a-4700-a8ff-f6512e825a23","Type":"ContainerStarted","Data":"5c525d4eb348599f455baf2f00f16062e25dd8c7634170bed7adceefeefb4608"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.214014 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca","Type":"ContainerStarted","Data":"bb291377d968373bbf12776aec69135801c539ff7691dea9a886a725054530b4"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.224389 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-54f5-account-create-update-79mzp" podStartSLOduration=2.224371531 podStartE2EDuration="2.224371531s" podCreationTimestamp="2025-12-03 06:01:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:13.222123722 +0000 UTC m=+1197.157584563" watchObservedRunningTime="2025-12-03 06:01:13.224371531 +0000 UTC m=+1197.159832372" Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.228062 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dx9c7" event={"ID":"266d33c8-6cee-4e1c-aec4-e2f291b185e8","Type":"ContainerStarted","Data":"1175edabaaedae8bc99cb4ed4076191ca11dd00c03b97e42d8d09c86644817de"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.239540 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-c965t" event={"ID":"52d9a37a-b91f-455c-9f2a-c0b60832ff78","Type":"ContainerStarted","Data":"143519bbc09bf62966f7fedeb2de636cb4a6eb9e5b98da85f3aaaca89b3075d4"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.251447 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" event={"ID":"7e220d06-666c-4087-8cb7-89a996c55933","Type":"ContainerStarted","Data":"7a9995b9598f081e29024d5aee9e3c938ea22621d9dc1a7bc961e85c5ce10b71"} Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.252592 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-dx9c7" podStartSLOduration=2.2525708890000002 podStartE2EDuration="2.252570889s" podCreationTimestamp="2025-12-03 06:01:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:13.251873021 +0000 UTC m=+1197.187333862" watchObservedRunningTime="2025-12-03 06:01:13.252570889 +0000 UTC m=+1197.188031730" Dec 03 06:01:13 crc kubenswrapper[4810]: I1203 06:01:13.279603 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-c965t" podStartSLOduration=2.279577596 podStartE2EDuration="2.279577596s" podCreationTimestamp="2025-12-03 06:01:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:13.268464275 +0000 UTC m=+1197.203925126" watchObservedRunningTime="2025-12-03 06:01:13.279577596 +0000 UTC m=+1197.215038437" Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.342663 4810 generic.go:334] "Generic (PLEG): container finished" podID="f2092357-d70b-4bc0-ac9e-3660aff8d920" containerID="d0f69f43c6fb537893db03308455486ea5f2c35c42f2110383c9d4df98d20b27" exitCode=0 Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.343334 4810 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/nova-cell1-db-create-5hwv5" event={"ID":"f2092357-d70b-4bc0-ac9e-3660aff8d920","Type":"ContainerDied","Data":"d0f69f43c6fb537893db03308455486ea5f2c35c42f2110383c9d4df98d20b27"} Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.345032 4810 generic.go:334] "Generic (PLEG): container finished" podID="826ba06a-b58a-4700-a8ff-f6512e825a23" containerID="fbcafc073b219f82b56af7642471f0f85154534e6a4817246b107b0036b05efc" exitCode=0 Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.345065 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" event={"ID":"826ba06a-b58a-4700-a8ff-f6512e825a23","Type":"ContainerDied","Data":"fbcafc073b219f82b56af7642471f0f85154534e6a4817246b107b0036b05efc"} Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.382898 4810 generic.go:334] "Generic (PLEG): container finished" podID="266d33c8-6cee-4e1c-aec4-e2f291b185e8" containerID="5a8267f2778112568ca98f1b1e1e5587f23df38a1d2c9606bdd2ff58dedbbd79" exitCode=0 Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.396407 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca","Type":"ContainerStarted","Data":"ab3192d9658c0f6a4171be38a38f4ee5e7f0e67b1cdc5604050b3841a4e8aad8"} Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.396468 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dx9c7" event={"ID":"266d33c8-6cee-4e1c-aec4-e2f291b185e8","Type":"ContainerDied","Data":"5a8267f2778112568ca98f1b1e1e5587f23df38a1d2c9606bdd2ff58dedbbd79"} Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.396817 4810 generic.go:334] "Generic (PLEG): container finished" podID="52d9a37a-b91f-455c-9f2a-c0b60832ff78" containerID="60f1198d60fed1031cdb39a88dba7b0c1abb313d59833b474854be1adda7949f" exitCode=0 Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.396929 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-c965t" event={"ID":"52d9a37a-b91f-455c-9f2a-c0b60832ff78","Type":"ContainerDied","Data":"60f1198d60fed1031cdb39a88dba7b0c1abb313d59833b474854be1adda7949f"} Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.401901 4810 generic.go:334] "Generic (PLEG): container finished" podID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerID="3d23e0239bf0b68767a533f36795e571d2ac50c11a04b4954b8f17883a84b3f4" exitCode=0 Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.401950 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23b10432-e146-4a0a-a0d7-793e0dae69a2","Type":"ContainerDied","Data":"3d23e0239bf0b68767a533f36795e571d2ac50c11a04b4954b8f17883a84b3f4"} Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.405189 4810 generic.go:334] "Generic (PLEG): container finished" podID="7e220d06-666c-4087-8cb7-89a996c55933" containerID="58c3520dbc61640d15dd4d988ab559bf485c156a465f5c8826aaaa746a873770" exitCode=0 Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.405231 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" event={"ID":"7e220d06-666c-4087-8cb7-89a996c55933","Type":"ContainerDied","Data":"58c3520dbc61640d15dd4d988ab559bf485c156a465f5c8826aaaa746a873770"} Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.411625 4810 generic.go:334] "Generic (PLEG): container finished" podID="ea042a9c-a4d6-4a64-9954-7a5b9f197ea3" 
containerID="b65cbb71ee610603bc16586f32dcad3af45a61894ce5c0412af1393e8c6c8374" exitCode=0 Dec 03 06:01:14 crc kubenswrapper[4810]: I1203 06:01:14.411685 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-54f5-account-create-update-79mzp" event={"ID":"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3","Type":"ContainerDied","Data":"b65cbb71ee610603bc16586f32dcad3af45a61894ce5c0412af1393e8c6c8374"} Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.518000 4810 patch_prober.go:28] interesting pod/openshift-kube-scheduler-crc container/kube-scheduler namespace/openshift-kube-scheduler: Liveness probe status=failure output="Get \"https://192.168.126.11:10259/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.518122 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podUID="3dcd261975c3d6b9a6ad6367fd4facd3" containerName="kube-scheduler" probeResult="failure" output="Get \"https://192.168.126.11:10259/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.533063 4810 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-h9458 container/olm-operator namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.533159 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h9458" podUID="530c5657-3703-41d8-9b52-c5f0ec8ce941" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.677570 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.841530 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-logs\") pod \"23b10432-e146-4a0a-a0d7-793e0dae69a2\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.841587 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r42mf\" (UniqueName: \"kubernetes.io/projected/23b10432-e146-4a0a-a0d7-793e0dae69a2-kube-api-access-r42mf\") pod \"23b10432-e146-4a0a-a0d7-793e0dae69a2\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.841616 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-httpd-run\") pod \"23b10432-e146-4a0a-a0d7-793e0dae69a2\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.841645 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-internal-tls-certs\") pod \"23b10432-e146-4a0a-a0d7-793e0dae69a2\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.841674 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-combined-ca-bundle\") pod \"23b10432-e146-4a0a-a0d7-793e0dae69a2\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.841723 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-scripts\") pod \"23b10432-e146-4a0a-a0d7-793e0dae69a2\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.841800 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"23b10432-e146-4a0a-a0d7-793e0dae69a2\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.841855 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-config-data\") pod \"23b10432-e146-4a0a-a0d7-793e0dae69a2\" (UID: \"23b10432-e146-4a0a-a0d7-793e0dae69a2\") " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.844127 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-logs" (OuterVolumeSpecName: "logs") pod "23b10432-e146-4a0a-a0d7-793e0dae69a2" (UID: "23b10432-e146-4a0a-a0d7-793e0dae69a2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.845061 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "23b10432-e146-4a0a-a0d7-793e0dae69a2" (UID: "23b10432-e146-4a0a-a0d7-793e0dae69a2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:15 crc kubenswrapper[4810]: E1203 06:01:15.849852 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33fa5884_f2c0_4391_a719_81c4d43605dc.slice/crio-67a2000e90709b93bf53567e5c214fefd1c3d9afe0dba15be2c529a3c416b846\": RecentStats: unable to find data in memory cache]" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.855900 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-scripts" (OuterVolumeSpecName: "scripts") pod "23b10432-e146-4a0a-a0d7-793e0dae69a2" (UID: "23b10432-e146-4a0a-a0d7-793e0dae69a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.875035 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "23b10432-e146-4a0a-a0d7-793e0dae69a2" (UID: "23b10432-e146-4a0a-a0d7-793e0dae69a2"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.875056 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23b10432-e146-4a0a-a0d7-793e0dae69a2-kube-api-access-r42mf" (OuterVolumeSpecName: "kube-api-access-r42mf") pod "23b10432-e146-4a0a-a0d7-793e0dae69a2" (UID: "23b10432-e146-4a0a-a0d7-793e0dae69a2"). InnerVolumeSpecName "kube-api-access-r42mf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.880826 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23b10432-e146-4a0a-a0d7-793e0dae69a2" (UID: "23b10432-e146-4a0a-a0d7-793e0dae69a2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.944077 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.944131 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r42mf\" (UniqueName: \"kubernetes.io/projected/23b10432-e146-4a0a-a0d7-793e0dae69a2-kube-api-access-r42mf\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.944143 4810 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23b10432-e146-4a0a-a0d7-793e0dae69a2-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.944152 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.944161 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.944184 4810 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 03 06:01:15 crc kubenswrapper[4810]: I1203 06:01:15.969213 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.019588 4810 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.021907 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "23b10432-e146-4a0a-a0d7-793e0dae69a2" (UID: "23b10432-e146-4a0a-a0d7-793e0dae69a2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.042649 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-b455b97b9-skz54" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.047395 4810 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.047643 4810 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.050124 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-config-data" (OuterVolumeSpecName: "config-data") pod "23b10432-e146-4a0a-a0d7-793e0dae69a2" (UID: "23b10432-e146-4a0a-a0d7-793e0dae69a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.058543 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.153436 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lccxq\" (UniqueName: \"kubernetes.io/projected/7e220d06-666c-4087-8cb7-89a996c55933-kube-api-access-lccxq\") pod \"7e220d06-666c-4087-8cb7-89a996c55933\" (UID: \"7e220d06-666c-4087-8cb7-89a996c55933\") " Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.154196 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e220d06-666c-4087-8cb7-89a996c55933-operator-scripts\") pod \"7e220d06-666c-4087-8cb7-89a996c55933\" (UID: \"7e220d06-666c-4087-8cb7-89a996c55933\") " Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.154986 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23b10432-e146-4a0a-a0d7-793e0dae69a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.158263 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e220d06-666c-4087-8cb7-89a996c55933-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e220d06-666c-4087-8cb7-89a996c55933" (UID: "7e220d06-666c-4087-8cb7-89a996c55933"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.171554 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e220d06-666c-4087-8cb7-89a996c55933-kube-api-access-lccxq" (OuterVolumeSpecName: "kube-api-access-lccxq") pod "7e220d06-666c-4087-8cb7-89a996c55933" (UID: "7e220d06-666c-4087-8cb7-89a996c55933"). InnerVolumeSpecName "kube-api-access-lccxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.208944 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.234367 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.273203 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh87t\" (UniqueName: \"kubernetes.io/projected/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-kube-api-access-wh87t\") pod \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\" (UID: \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\") " Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.273279 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52d9a37a-b91f-455c-9f2a-c0b60832ff78-operator-scripts\") pod \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\" (UID: \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\") " Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.273392 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-operator-scripts\") pod \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\" (UID: \"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3\") " Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.273453 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwrt4\" (UniqueName: \"kubernetes.io/projected/52d9a37a-b91f-455c-9f2a-c0b60832ff78-kube-api-access-wwrt4\") pod \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\" (UID: \"52d9a37a-b91f-455c-9f2a-c0b60832ff78\") " Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.274398 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e220d06-666c-4087-8cb7-89a996c55933-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.274447 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lccxq\" (UniqueName: \"kubernetes.io/projected/7e220d06-666c-4087-8cb7-89a996c55933-kube-api-access-lccxq\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.276346 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ea042a9c-a4d6-4a64-9954-7a5b9f197ea3" (UID: "ea042a9c-a4d6-4a64-9954-7a5b9f197ea3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.277844 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52d9a37a-b91f-455c-9f2a-c0b60832ff78-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "52d9a37a-b91f-455c-9f2a-c0b60832ff78" (UID: "52d9a37a-b91f-455c-9f2a-c0b60832ff78"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.278944 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d9a37a-b91f-455c-9f2a-c0b60832ff78-kube-api-access-wwrt4" (OuterVolumeSpecName: "kube-api-access-wwrt4") pod "52d9a37a-b91f-455c-9f2a-c0b60832ff78" (UID: "52d9a37a-b91f-455c-9f2a-c0b60832ff78"). InnerVolumeSpecName "kube-api-access-wwrt4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.283030 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-kube-api-access-wh87t" (OuterVolumeSpecName: "kube-api-access-wh87t") pod "ea042a9c-a4d6-4a64-9954-7a5b9f197ea3" (UID: "ea042a9c-a4d6-4a64-9954-7a5b9f197ea3"). InnerVolumeSpecName "kube-api-access-wh87t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.294204 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.384462 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh87t\" (UniqueName: \"kubernetes.io/projected/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-kube-api-access-wh87t\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.385361 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52d9a37a-b91f-455c-9f2a-c0b60832ff78-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.385413 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.385424 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwrt4\" (UniqueName: \"kubernetes.io/projected/52d9a37a-b91f-455c-9f2a-c0b60832ff78-kube-api-access-wwrt4\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.487183 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzl7j\" (UniqueName: \"kubernetes.io/projected/266d33c8-6cee-4e1c-aec4-e2f291b185e8-kube-api-access-fzl7j\") pod \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\" (UID: \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\") " Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.487342 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/266d33c8-6cee-4e1c-aec4-e2f291b185e8-operator-scripts\") pod \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\" (UID: \"266d33c8-6cee-4e1c-aec4-e2f291b185e8\") " Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.487948 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/266d33c8-6cee-4e1c-aec4-e2f291b185e8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "266d33c8-6cee-4e1c-aec4-e2f291b185e8" (UID: "266d33c8-6cee-4e1c-aec4-e2f291b185e8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.489905 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/266d33c8-6cee-4e1c-aec4-e2f291b185e8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.498007 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/266d33c8-6cee-4e1c-aec4-e2f291b185e8-kube-api-access-fzl7j" (OuterVolumeSpecName: "kube-api-access-fzl7j") pod "266d33c8-6cee-4e1c-aec4-e2f291b185e8" (UID: "266d33c8-6cee-4e1c-aec4-e2f291b185e8"). InnerVolumeSpecName "kube-api-access-fzl7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.592075 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzl7j\" (UniqueName: \"kubernetes.io/projected/266d33c8-6cee-4e1c-aec4-e2f291b185e8-kube-api-access-fzl7j\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.623533 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-54f5-account-create-update-79mzp" event={"ID":"ea042a9c-a4d6-4a64-9954-7a5b9f197ea3","Type":"ContainerDied","Data":"d1b2d3492c2fa2722ab1e75e7ab7c083a3fe93da79626c228b074fcdc8a5828e"} Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.623581 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1b2d3492c2fa2722ab1e75e7ab7c083a3fe93da79626c228b074fcdc8a5828e" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.623649 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-54f5-account-create-update-79mzp" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.635149 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5hwv5" event={"ID":"f2092357-d70b-4bc0-ac9e-3660aff8d920","Type":"ContainerDied","Data":"b71222d7e712ef1fbe6a77f7a777acf6ef0ba7e144374e834b74a40b3df20e32"} Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.635197 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b71222d7e712ef1fbe6a77f7a777acf6ef0ba7e144374e834b74a40b3df20e32" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.642456 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" event={"ID":"826ba06a-b58a-4700-a8ff-f6512e825a23","Type":"ContainerDied","Data":"5c525d4eb348599f455baf2f00f16062e25dd8c7634170bed7adceefeefb4608"} Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.642503 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c525d4eb348599f455baf2f00f16062e25dd8c7634170bed7adceefeefb4608" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.644993 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca","Type":"ContainerStarted","Data":"e4b97a25d79839839150ac15d56ec54c32e40059887f0a788cdcc7a42018c58d"} Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.645228 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.646752 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dx9c7" 
event={"ID":"266d33c8-6cee-4e1c-aec4-e2f291b185e8","Type":"ContainerDied","Data":"1175edabaaedae8bc99cb4ed4076191ca11dd00c03b97e42d8d09c86644817de"} Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.646773 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1175edabaaedae8bc99cb4ed4076191ca11dd00c03b97e42d8d09c86644817de" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.647025 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-dx9c7" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.658787 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-c965t" event={"ID":"52d9a37a-b91f-455c-9f2a-c0b60832ff78","Type":"ContainerDied","Data":"143519bbc09bf62966f7fedeb2de636cb4a6eb9e5b98da85f3aaaca89b3075d4"} Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.658844 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="143519bbc09bf62966f7fedeb2de636cb4a6eb9e5b98da85f3aaaca89b3075d4" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.658842 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-c965t" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.669764 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"23b10432-e146-4a0a-a0d7-793e0dae69a2","Type":"ContainerDied","Data":"d500def455109b885f461df76ccd4445175d06374a96d4c0c24eebb159eaef3e"} Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.669823 4810 scope.go:117] "RemoveContainer" containerID="3d23e0239bf0b68767a533f36795e571d2ac50c11a04b4954b8f17883a84b3f4" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.670069 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.690423 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" event={"ID":"7e220d06-666c-4087-8cb7-89a996c55933","Type":"ContainerDied","Data":"7a9995b9598f081e29024d5aee9e3c938ea22621d9dc1a7bc961e85c5ce10b71"} Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.690468 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a9995b9598f081e29024d5aee9e3c938ea22621d9dc1a7bc961e85c5ce10b71" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.690554 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b0f1-account-create-update-kjdzz" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.958250 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.972481 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.980237 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.980209944 podStartE2EDuration="5.980209944s" podCreationTimestamp="2025-12-03 06:01:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:16.668836164 +0000 UTC m=+1200.604297005" watchObservedRunningTime="2025-12-03 06:01:16.980209944 +0000 UTC m=+1200.915670775" Dec 03 06:01:16 crc kubenswrapper[4810]: I1203 06:01:16.983146 4810 scope.go:117] "RemoveContainer" containerID="7387ca5156bc4b58fbce2b30e2c7926eb0216ddca329561eb212067957565b08" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.058862 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.101415 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.113572 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2092357-d70b-4bc0-ac9e-3660aff8d920-operator-scripts\") pod \"f2092357-d70b-4bc0-ac9e-3660aff8d920\" (UID: \"f2092357-d70b-4bc0-ac9e-3660aff8d920\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.114008 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lffpr\" (UniqueName: \"kubernetes.io/projected/826ba06a-b58a-4700-a8ff-f6512e825a23-kube-api-access-lffpr\") pod \"826ba06a-b58a-4700-a8ff-f6512e825a23\" (UID: \"826ba06a-b58a-4700-a8ff-f6512e825a23\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.114157 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45mxd\" (UniqueName: \"kubernetes.io/projected/f2092357-d70b-4bc0-ac9e-3660aff8d920-kube-api-access-45mxd\") pod \"f2092357-d70b-4bc0-ac9e-3660aff8d920\" (UID: \"f2092357-d70b-4bc0-ac9e-3660aff8d920\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.114300 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826ba06a-b58a-4700-a8ff-f6512e825a23-operator-scripts\") pod \"826ba06a-b58a-4700-a8ff-f6512e825a23\" (UID: \"826ba06a-b58a-4700-a8ff-f6512e825a23\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.118517 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/826ba06a-b58a-4700-a8ff-f6512e825a23-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "826ba06a-b58a-4700-a8ff-f6512e825a23" (UID: "826ba06a-b58a-4700-a8ff-f6512e825a23"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.119239 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2092357-d70b-4bc0-ac9e-3660aff8d920-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f2092357-d70b-4bc0-ac9e-3660aff8d920" (UID: "f2092357-d70b-4bc0-ac9e-3660aff8d920"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.127826 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/826ba06a-b58a-4700-a8ff-f6512e825a23-kube-api-access-lffpr" (OuterVolumeSpecName: "kube-api-access-lffpr") pod "826ba06a-b58a-4700-a8ff-f6512e825a23" (UID: "826ba06a-b58a-4700-a8ff-f6512e825a23"). InnerVolumeSpecName "kube-api-access-lffpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.129306 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.129985 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerName="glance-log" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130011 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerName="glance-log" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.130032 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="266d33c8-6cee-4e1c-aec4-e2f291b185e8" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130041 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="266d33c8-6cee-4e1c-aec4-e2f291b185e8" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.130053 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="826ba06a-b58a-4700-a8ff-f6512e825a23" containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130059 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="826ba06a-b58a-4700-a8ff-f6512e825a23" containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.130083 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2092357-d70b-4bc0-ac9e-3660aff8d920" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130090 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2092357-d70b-4bc0-ac9e-3660aff8d920" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.130122 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e220d06-666c-4087-8cb7-89a996c55933" containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130129 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e220d06-666c-4087-8cb7-89a996c55933" containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.130143 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d9a37a-b91f-455c-9f2a-c0b60832ff78" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130151 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d9a37a-b91f-455c-9f2a-c0b60832ff78" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.130163 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea042a9c-a4d6-4a64-9954-7a5b9f197ea3" containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130169 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea042a9c-a4d6-4a64-9954-7a5b9f197ea3" 
containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.130183 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerName="glance-httpd" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130191 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerName="glance-httpd" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130363 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerName="glance-log" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130378 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="52d9a37a-b91f-455c-9f2a-c0b60832ff78" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130390 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" containerName="glance-httpd" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130401 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e220d06-666c-4087-8cb7-89a996c55933" containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130419 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2092357-d70b-4bc0-ac9e-3660aff8d920" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130427 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea042a9c-a4d6-4a64-9954-7a5b9f197ea3" containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130435 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="266d33c8-6cee-4e1c-aec4-e2f291b185e8" containerName="mariadb-database-create" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.130594 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="826ba06a-b58a-4700-a8ff-f6512e825a23" containerName="mariadb-account-create-update" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.131915 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.147174 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2092357-d70b-4bc0-ac9e-3660aff8d920-kube-api-access-45mxd" (OuterVolumeSpecName: "kube-api-access-45mxd") pod "f2092357-d70b-4bc0-ac9e-3660aff8d920" (UID: "f2092357-d70b-4bc0-ac9e-3660aff8d920"). InnerVolumeSpecName "kube-api-access-45mxd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.148265 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.151264 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.160638 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.221537 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45mxd\" (UniqueName: \"kubernetes.io/projected/f2092357-d70b-4bc0-ac9e-3660aff8d920-kube-api-access-45mxd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.221779 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826ba06a-b58a-4700-a8ff-f6512e825a23-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.221793 4810 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2092357-d70b-4bc0-ac9e-3660aff8d920-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.221811 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lffpr\" (UniqueName: \"kubernetes.io/projected/826ba06a-b58a-4700-a8ff-f6512e825a23-kube-api-access-lffpr\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.301755 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.323189 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.323244 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.323301 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbsw5\" (UniqueName: \"kubernetes.io/projected/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-kube-api-access-wbsw5\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.323937 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.324107 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.324140 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.324252 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.324337 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.425546 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-scripts\") pod 
\"2c726589-6410-4893-bf46-f0585c3da8a2\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.426832 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-sg-core-conf-yaml\") pod \"2c726589-6410-4893-bf46-f0585c3da8a2\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.426958 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-combined-ca-bundle\") pod \"2c726589-6410-4893-bf46-f0585c3da8a2\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.427215 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-run-httpd\") pod \"2c726589-6410-4893-bf46-f0585c3da8a2\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.427307 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bc87\" (UniqueName: \"kubernetes.io/projected/2c726589-6410-4893-bf46-f0585c3da8a2-kube-api-access-2bc87\") pod \"2c726589-6410-4893-bf46-f0585c3da8a2\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.427428 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-log-httpd\") pod \"2c726589-6410-4893-bf46-f0585c3da8a2\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.427591 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-config-data\") pod \"2c726589-6410-4893-bf46-f0585c3da8a2\" (UID: \"2c726589-6410-4893-bf46-f0585c3da8a2\") " Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.428013 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.428112 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.428182 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.428264 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.428335 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.428445 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.428538 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.428631 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbsw5\" (UniqueName: \"kubernetes.io/projected/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-kube-api-access-wbsw5\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.429963 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.432527 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.435807 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c726589-6410-4893-bf46-f0585c3da8a2-kube-api-access-2bc87" (OuterVolumeSpecName: "kube-api-access-2bc87") pod "2c726589-6410-4893-bf46-f0585c3da8a2" (UID: "2c726589-6410-4893-bf46-f0585c3da8a2"). InnerVolumeSpecName "kube-api-access-2bc87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.436148 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2c726589-6410-4893-bf46-f0585c3da8a2" (UID: "2c726589-6410-4893-bf46-f0585c3da8a2"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.436237 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-scripts" (OuterVolumeSpecName: "scripts") pod "2c726589-6410-4893-bf46-f0585c3da8a2" (UID: "2c726589-6410-4893-bf46-f0585c3da8a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.436520 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2c726589-6410-4893-bf46-f0585c3da8a2" (UID: "2c726589-6410-4893-bf46-f0585c3da8a2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.436767 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.445152 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.453507 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.461101 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.462306 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbsw5\" (UniqueName: \"kubernetes.io/projected/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-kube-api-access-wbsw5\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.473280 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b4868fe-4bdb-492d-bbb1-94d2793b41eb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.522013 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2c726589-6410-4893-bf46-f0585c3da8a2" (UID: "2c726589-6410-4893-bf46-f0585c3da8a2"). 
InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.532480 4810 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.532542 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bc87\" (UniqueName: \"kubernetes.io/projected/2c726589-6410-4893-bf46-f0585c3da8a2-kube-api-access-2bc87\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.532556 4810 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c726589-6410-4893-bf46-f0585c3da8a2-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.532565 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.532574 4810 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.540402 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"6b4868fe-4bdb-492d-bbb1-94d2793b41eb\") " pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.597808 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.638909 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c726589-6410-4893-bf46-f0585c3da8a2" (UID: "2c726589-6410-4893-bf46-f0585c3da8a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.723920 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-config-data" (OuterVolumeSpecName: "config-data") pod "2c726589-6410-4893-bf46-f0585c3da8a2" (UID: "2c726589-6410-4893-bf46-f0585c3da8a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.737704 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.737760 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c726589-6410-4893-bf46-f0585c3da8a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.742896 4810 generic.go:334] "Generic (PLEG): container finished" podID="2c726589-6410-4893-bf46-f0585c3da8a2" containerID="379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6" exitCode=0 Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.743016 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-57b4-account-create-update-qbvs6" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.745802 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerDied","Data":"379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6"} Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.745890 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2c726589-6410-4893-bf46-f0585c3da8a2","Type":"ContainerDied","Data":"fe1f9edd5bcfe50ee5af1f92b5addb856b0bf928bc7393a8919d461b6da1ed69"} Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.745919 4810 scope.go:117] "RemoveContainer" containerID="7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.746198 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.749385 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-5hwv5" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.841022 4810 scope.go:117] "RemoveContainer" containerID="aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.883024 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.890453 4810 scope.go:117] "RemoveContainer" containerID="4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.900805 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.917244 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.917814 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="ceilometer-notification-agent" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.917829 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="ceilometer-notification-agent" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.917880 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="ceilometer-central-agent" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.917887 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="ceilometer-central-agent" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.917919 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="sg-core" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.917926 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="sg-core" Dec 03 06:01:17 crc kubenswrapper[4810]: E1203 06:01:17.917937 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="proxy-httpd" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.917943 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="proxy-httpd" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.918163 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="ceilometer-central-agent" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.918175 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="proxy-httpd" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.918188 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="sg-core" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.918206 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" containerName="ceilometer-notification-agent" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.920275 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.933107 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.941182 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.941381 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 06:01:17 crc kubenswrapper[4810]: I1203 06:01:17.974964 4810 scope.go:117] "RemoveContainer" containerID="379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.018820 4810 scope.go:117] "RemoveContainer" containerID="7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a" Dec 03 06:01:18 crc kubenswrapper[4810]: E1203 06:01:18.019575 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a\": container with ID starting with 7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a not found: ID does not exist" containerID="7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.019630 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a"} err="failed to get container status \"7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a\": rpc error: code = NotFound desc = could not find container \"7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a\": container with ID starting with 7463df5060138303cdaa36975aa8d8199df3b959865a13e3d6ce0cd00555d24a not found: ID does not exist" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.019668 4810 scope.go:117] "RemoveContainer" containerID="aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b" Dec 03 06:01:18 crc kubenswrapper[4810]: E1203 06:01:18.030917 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b\": container with ID starting with aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b not found: ID does not exist" containerID="aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.030981 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b"} err="failed to get container status \"aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b\": rpc error: code = NotFound desc = could not find container \"aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b\": container with ID starting with aac0899e26eed976c75d5bb40cc0e6d660f944dd0cdb61201e5f98076244e08b not found: ID does not exist" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.031014 4810 scope.go:117] "RemoveContainer" containerID="4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248" Dec 03 06:01:18 crc kubenswrapper[4810]: E1203 06:01:18.034123 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248\": container with ID starting with 4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248 not found: ID does not exist" containerID="4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.034158 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248"} err="failed to get container status \"4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248\": rpc error: code = NotFound desc = could not find container \"4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248\": container with ID starting with 4a0c9a9446710a29df58bc22eec9e885720672ec54c91fb10d7febb097423248 not found: ID does not exist" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.034179 4810 scope.go:117] "RemoveContainer" containerID="379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6" Dec 03 06:01:18 crc kubenswrapper[4810]: E1203 06:01:18.034464 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6\": container with ID starting with 379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6 not found: ID does not exist" containerID="379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.034487 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6"} err="failed to get container status \"379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6\": rpc error: code = NotFound desc = could not find container \"379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6\": container with ID starting with 379d432e08bbdc454907c222a6f4ad6f2386362c1ac37f9730f55385435dd0a6 not found: ID does not exist" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.043829 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.044198 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-config-data\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.044287 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-run-httpd\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.044364 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrgtv\" (UniqueName: \"kubernetes.io/projected/7523d2ba-5d53-4443-9e8b-324e113e394e-kube-api-access-xrgtv\") pod \"ceilometer-0\" (UID: 
\"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.044503 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.044717 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-scripts\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.044833 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-log-httpd\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.147117 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.148201 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-config-data\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.148233 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-run-httpd\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.148263 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrgtv\" (UniqueName: \"kubernetes.io/projected/7523d2ba-5d53-4443-9e8b-324e113e394e-kube-api-access-xrgtv\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.148365 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.148393 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-scripts\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.148426 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-log-httpd\") 
pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.148947 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-run-httpd\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.149044 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-log-httpd\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.153626 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.154386 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-config-data\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.157088 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-scripts\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.159523 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.167607 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrgtv\" (UniqueName: \"kubernetes.io/projected/7523d2ba-5d53-4443-9e8b-324e113e394e-kube-api-access-xrgtv\") pod \"ceilometer-0\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.261052 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.369757 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.408198 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23b10432-e146-4a0a-a0d7-793e0dae69a2" path="/var/lib/kubelet/pods/23b10432-e146-4a0a-a0d7-793e0dae69a2/volumes" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.410506 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c726589-6410-4893-bf46-f0585c3da8a2" path="/var/lib/kubelet/pods/2c726589-6410-4893-bf46-f0585c3da8a2/volumes" Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.739377 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.801994 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b4868fe-4bdb-492d-bbb1-94d2793b41eb","Type":"ContainerStarted","Data":"0728827b7f0f99c4ddb7d6928ad6bf38b736a1564c94e81f78a086ab78b28477"} Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.823936 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.825199 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerName="glance-log" containerID="cri-o://1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009" gracePeriod=30 Dec 03 06:01:18 crc kubenswrapper[4810]: I1203 06:01:18.825662 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerName="glance-httpd" containerID="cri-o://6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd" gracePeriod=30 Dec 03 06:01:19 crc kubenswrapper[4810]: I1203 06:01:19.826092 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerStarted","Data":"93ad88519e275c613ea6da79c9667265acda86c5c72984e44eecf458381af4ea"} Dec 03 06:01:19 crc kubenswrapper[4810]: I1203 06:01:19.826531 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerStarted","Data":"0b56eeb79f1071aebee109761f5b2622e40cf2a20f5f825557219b6b6c56e2fa"} Dec 03 06:01:19 crc kubenswrapper[4810]: I1203 06:01:19.826546 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerStarted","Data":"48771b12ea7bb42cc0b6d2f230ad8e758bc8c2de76342c75b82d80cdd267eec1"} Dec 03 06:01:19 crc kubenswrapper[4810]: I1203 06:01:19.828082 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b4868fe-4bdb-492d-bbb1-94d2793b41eb","Type":"ContainerStarted","Data":"92df185df6aae7518687cac726c03bf60b40ded564d01563681f75e201e90bd2"} Dec 03 06:01:19 crc kubenswrapper[4810]: I1203 06:01:19.828152 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"6b4868fe-4bdb-492d-bbb1-94d2793b41eb","Type":"ContainerStarted","Data":"4a1d3251d9bf1f8c4eda2ae58678401925e5d04683ba10e1402171a3ec30f415"} Dec 03 06:01:19 crc kubenswrapper[4810]: I1203 06:01:19.832268 4810 generic.go:334] "Generic (PLEG): container finished" podID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerID="1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009" exitCode=143 Dec 03 06:01:19 crc kubenswrapper[4810]: I1203 06:01:19.832328 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf98f609-ab0e-41f9-8f8a-72324c3ac333","Type":"ContainerDied","Data":"1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009"} Dec 03 06:01:19 crc kubenswrapper[4810]: I1203 06:01:19.858805 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.858709724 podStartE2EDuration="2.858709724s" podCreationTimestamp="2025-12-03 06:01:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:19.847599623 +0000 UTC m=+1203.783060464" watchObservedRunningTime="2025-12-03 06:01:19.858709724 +0000 UTC m=+1203.794170565" Dec 03 06:01:20 crc kubenswrapper[4810]: I1203 06:01:20.845913 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerStarted","Data":"3fa764d25d9f1d4ff141f1bfb4b712de9c77c3d27e68819c874f1eef9fa7f666"} Dec 03 06:01:21 crc kubenswrapper[4810]: I1203 06:01:21.859334 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerStarted","Data":"9b70a84a8a48513db6b23572fc15b2d9a666fea14fa5c79d3c320eda9d3e346a"} Dec 03 06:01:21 crc kubenswrapper[4810]: I1203 06:01:21.860325 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 06:01:21 crc kubenswrapper[4810]: I1203 06:01:21.907472 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.250641368 podStartE2EDuration="4.907452306s" podCreationTimestamp="2025-12-03 06:01:17 +0000 UTC" firstStartedPulling="2025-12-03 06:01:18.781684064 +0000 UTC m=+1202.717144905" lastFinishedPulling="2025-12-03 06:01:21.438495002 +0000 UTC m=+1205.373955843" observedRunningTime="2025-12-03 06:01:21.890624285 +0000 UTC m=+1205.826085126" watchObservedRunningTime="2025-12-03 06:01:21.907452306 +0000 UTC m=+1205.842913147" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.089912 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nnphx"] Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.091126 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.094287 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.094636 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.095287 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-4pg6x" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.110890 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nnphx"] Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.132242 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6w6j\" (UniqueName: \"kubernetes.io/projected/3445e1e0-f732-451f-bb47-ad7e6492dfa3-kube-api-access-g6w6j\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.132292 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-scripts\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.132344 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.132406 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-config-data\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.234313 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6w6j\" (UniqueName: \"kubernetes.io/projected/3445e1e0-f732-451f-bb47-ad7e6492dfa3-kube-api-access-g6w6j\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.234390 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-scripts\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.234473 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: 
\"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.234575 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-config-data\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.240584 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-scripts\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.244988 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.247280 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-config-data\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.261613 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6w6j\" (UniqueName: \"kubernetes.io/projected/3445e1e0-f732-451f-bb47-ad7e6492dfa3-kube-api-access-g6w6j\") pod \"nova-cell0-conductor-db-sync-nnphx\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.496308 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.722572 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.829868 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nnphx"] Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.850507 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-httpd-run\") pod \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.850556 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-config-data\") pod \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.850627 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.850715 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-combined-ca-bundle\") pod \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.850781 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swhz9\" (UniqueName: \"kubernetes.io/projected/bf98f609-ab0e-41f9-8f8a-72324c3ac333-kube-api-access-swhz9\") pod \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.850809 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-logs\") pod \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.850929 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-scripts\") pod \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.850991 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-public-tls-certs\") pod \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\" (UID: \"bf98f609-ab0e-41f9-8f8a-72324c3ac333\") " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.851081 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "bf98f609-ab0e-41f9-8f8a-72324c3ac333" (UID: "bf98f609-ab0e-41f9-8f8a-72324c3ac333"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.851494 4810 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.852507 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-logs" (OuterVolumeSpecName: "logs") pod "bf98f609-ab0e-41f9-8f8a-72324c3ac333" (UID: "bf98f609-ab0e-41f9-8f8a-72324c3ac333"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.859384 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-scripts" (OuterVolumeSpecName: "scripts") pod "bf98f609-ab0e-41f9-8f8a-72324c3ac333" (UID: "bf98f609-ab0e-41f9-8f8a-72324c3ac333"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.872487 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf98f609-ab0e-41f9-8f8a-72324c3ac333-kube-api-access-swhz9" (OuterVolumeSpecName: "kube-api-access-swhz9") pod "bf98f609-ab0e-41f9-8f8a-72324c3ac333" (UID: "bf98f609-ab0e-41f9-8f8a-72324c3ac333"). InnerVolumeSpecName "kube-api-access-swhz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.877563 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "bf98f609-ab0e-41f9-8f8a-72324c3ac333" (UID: "bf98f609-ab0e-41f9-8f8a-72324c3ac333"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.884304 4810 generic.go:334] "Generic (PLEG): container finished" podID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerID="6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd" exitCode=0 Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.884390 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf98f609-ab0e-41f9-8f8a-72324c3ac333","Type":"ContainerDied","Data":"6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd"} Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.884431 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf98f609-ab0e-41f9-8f8a-72324c3ac333","Type":"ContainerDied","Data":"77c38bbd51dc7f339a49421385b170d114e4c3b76ab2fc6b516a428a5b8ce786"} Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.884455 4810 scope.go:117] "RemoveContainer" containerID="6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.884641 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.897303 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nnphx" event={"ID":"3445e1e0-f732-451f-bb47-ad7e6492dfa3","Type":"ContainerStarted","Data":"0991d2834451bbc87574294d8d752f6dae184242955698d3e2f8bc97058da1af"} Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.921608 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf98f609-ab0e-41f9-8f8a-72324c3ac333" (UID: "bf98f609-ab0e-41f9-8f8a-72324c3ac333"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.946957 4810 scope.go:117] "RemoveContainer" containerID="1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.954657 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.954705 4810 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.954716 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.954724 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swhz9\" (UniqueName: \"kubernetes.io/projected/bf98f609-ab0e-41f9-8f8a-72324c3ac333-kube-api-access-swhz9\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.954760 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf98f609-ab0e-41f9-8f8a-72324c3ac333-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.965312 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bf98f609-ab0e-41f9-8f8a-72324c3ac333" (UID: "bf98f609-ab0e-41f9-8f8a-72324c3ac333"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.972205 4810 scope.go:117] "RemoveContainer" containerID="6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.972260 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-config-data" (OuterVolumeSpecName: "config-data") pod "bf98f609-ab0e-41f9-8f8a-72324c3ac333" (UID: "bf98f609-ab0e-41f9-8f8a-72324c3ac333"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:22 crc kubenswrapper[4810]: E1203 06:01:22.972849 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd\": container with ID starting with 6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd not found: ID does not exist" containerID="6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.972903 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd"} err="failed to get container status \"6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd\": rpc error: code = NotFound desc = could not find container \"6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd\": container with ID starting with 6d1ef181b5d147a4bfa7a1c8755f6d3009efe059b8e8f7ed3d8356a04c6772dd not found: ID does not exist" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.972937 4810 scope.go:117] "RemoveContainer" containerID="1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009" Dec 03 06:01:22 crc kubenswrapper[4810]: E1203 06:01:22.973357 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009\": container with ID starting with 1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009 not found: ID does not exist" containerID="1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.973414 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009"} err="failed to get container status \"1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009\": rpc error: code = NotFound desc = could not find container \"1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009\": container with ID starting with 1a212829f996093e44641f592731ae66f4e4eea0da3da04f320997e01fbbe009 not found: ID does not exist" Dec 03 06:01:22 crc kubenswrapper[4810]: I1203 06:01:22.990712 4810 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.056665 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.057047 4810 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.057109 4810 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf98f609-ab0e-41f9-8f8a-72324c3ac333-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.324107 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:01:23 crc 
kubenswrapper[4810]: I1203 06:01:23.341571 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.362090 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:01:23 crc kubenswrapper[4810]: E1203 06:01:23.362612 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerName="glance-log" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.362634 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerName="glance-log" Dec 03 06:01:23 crc kubenswrapper[4810]: E1203 06:01:23.362651 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerName="glance-httpd" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.362658 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerName="glance-httpd" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.362854 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerName="glance-httpd" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.362878 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" containerName="glance-log" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.364001 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.367812 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.367994 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.384164 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.506315 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-scripts\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.506693 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7ncv\" (UniqueName: \"kubernetes.io/projected/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-kube-api-access-x7ncv\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.506754 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.506797 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-config-data\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.506836 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-logs\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.506881 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.506899 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.506930 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.608589 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.608707 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-scripts\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.608764 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7ncv\" (UniqueName: \"kubernetes.io/projected/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-kube-api-access-x7ncv\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.608800 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.608830 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-config-data\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.608855 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-logs\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.608888 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.608902 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.609592 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.609650 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-logs\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.609888 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.617176 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-config-data\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.617906 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.618024 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-scripts\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.618628 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.628163 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7ncv\" (UniqueName: \"kubernetes.io/projected/2f6dafcd-7f4b-47a9-b5ae-be22f6c84491-kube-api-access-x7ncv\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.652779 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491\") " pod="openstack/glance-default-external-api-0" Dec 03 06:01:23 crc kubenswrapper[4810]: I1203 06:01:23.704639 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 06:01:24 crc kubenswrapper[4810]: I1203 06:01:24.165527 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 03 06:01:24 crc kubenswrapper[4810]: I1203 06:01:24.396694 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf98f609-ab0e-41f9-8f8a-72324c3ac333" path="/var/lib/kubelet/pods/bf98f609-ab0e-41f9-8f8a-72324c3ac333/volumes" Dec 03 06:01:24 crc kubenswrapper[4810]: I1203 06:01:24.402173 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 06:01:24 crc kubenswrapper[4810]: W1203 06:01:24.408842 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f6dafcd_7f4b_47a9_b5ae_be22f6c84491.slice/crio-a3d667a94ec45375714d2f8d6e6dd450f8ae2551d9c1ea1d453e9611e5eba2be WatchSource:0}: Error finding container a3d667a94ec45375714d2f8d6e6dd450f8ae2551d9c1ea1d453e9611e5eba2be: Status 404 returned error can't find the container with id a3d667a94ec45375714d2f8d6e6dd450f8ae2551d9c1ea1d453e9611e5eba2be Dec 03 06:01:24 crc kubenswrapper[4810]: I1203 06:01:24.939211 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491","Type":"ContainerStarted","Data":"a3d667a94ec45375714d2f8d6e6dd450f8ae2551d9c1ea1d453e9611e5eba2be"} Dec 03 06:01:25 crc kubenswrapper[4810]: I1203 06:01:25.956713 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491","Type":"ContainerStarted","Data":"b18472d9a706101707310da01cb228f42e61cce0c6badd6dfb652a56d7025a19"} Dec 03 06:01:26 crc kubenswrapper[4810]: I1203 06:01:26.973215 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"2f6dafcd-7f4b-47a9-b5ae-be22f6c84491","Type":"ContainerStarted","Data":"f6b082beb6b1c85859b35c274feed3c3fc1a820c105c84cd61d1445de4504736"} Dec 03 06:01:26 crc kubenswrapper[4810]: I1203 06:01:26.998939 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.998908417 podStartE2EDuration="3.998908417s" podCreationTimestamp="2025-12-03 06:01:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:26.994689846 +0000 UTC m=+1210.930150677" watchObservedRunningTime="2025-12-03 06:01:26.998908417 +0000 UTC m=+1210.934369278" Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.187483 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.187836 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="ceilometer-central-agent" containerID="cri-o://0b56eeb79f1071aebee109761f5b2622e40cf2a20f5f825557219b6b6c56e2fa" gracePeriod=30 Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.187920 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="ceilometer-notification-agent" containerID="cri-o://93ad88519e275c613ea6da79c9667265acda86c5c72984e44eecf458381af4ea" gracePeriod=30 Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.187953 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="proxy-httpd" containerID="cri-o://9b70a84a8a48513db6b23572fc15b2d9a666fea14fa5c79d3c320eda9d3e346a" gracePeriod=30 Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.187945 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="sg-core" containerID="cri-o://3fa764d25d9f1d4ff141f1bfb4b712de9c77c3d27e68819c874f1eef9fa7f666" gracePeriod=30 Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.598049 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.598414 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.648368 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.648569 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.989498 4810 generic.go:334] "Generic (PLEG): container finished" podID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerID="9b70a84a8a48513db6b23572fc15b2d9a666fea14fa5c79d3c320eda9d3e346a" exitCode=0 Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.989535 4810 generic.go:334] "Generic (PLEG): container finished" podID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerID="3fa764d25d9f1d4ff141f1bfb4b712de9c77c3d27e68819c874f1eef9fa7f666" exitCode=2 Dec 03 06:01:27 crc 
kubenswrapper[4810]: I1203 06:01:27.989543 4810 generic.go:334] "Generic (PLEG): container finished" podID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerID="93ad88519e275c613ea6da79c9667265acda86c5c72984e44eecf458381af4ea" exitCode=0 Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.989580 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerDied","Data":"9b70a84a8a48513db6b23572fc15b2d9a666fea14fa5c79d3c320eda9d3e346a"} Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.989636 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerDied","Data":"3fa764d25d9f1d4ff141f1bfb4b712de9c77c3d27e68819c874f1eef9fa7f666"} Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.989673 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerDied","Data":"93ad88519e275c613ea6da79c9667265acda86c5c72984e44eecf458381af4ea"} Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.990116 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:27 crc kubenswrapper[4810]: I1203 06:01:27.990150 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:30 crc kubenswrapper[4810]: I1203 06:01:30.146377 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:30 crc kubenswrapper[4810]: I1203 06:01:30.146956 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 06:01:30 crc kubenswrapper[4810]: I1203 06:01:30.160517 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 06:01:33 crc kubenswrapper[4810]: I1203 06:01:33.704832 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 06:01:33 crc kubenswrapper[4810]: I1203 06:01:33.705673 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 06:01:33 crc kubenswrapper[4810]: I1203 06:01:33.749697 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 06:01:33 crc kubenswrapper[4810]: I1203 06:01:33.750558 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 06:01:34 crc kubenswrapper[4810]: I1203 06:01:34.057134 4810 generic.go:334] "Generic (PLEG): container finished" podID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerID="0b56eeb79f1071aebee109761f5b2622e40cf2a20f5f825557219b6b6c56e2fa" exitCode=0 Dec 03 06:01:34 crc kubenswrapper[4810]: I1203 06:01:34.057208 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerDied","Data":"0b56eeb79f1071aebee109761f5b2622e40cf2a20f5f825557219b6b6c56e2fa"} Dec 03 06:01:34 crc kubenswrapper[4810]: I1203 06:01:34.057787 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 06:01:34 crc kubenswrapper[4810]: I1203 06:01:34.057864 4810 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 06:01:36 crc kubenswrapper[4810]: I1203 06:01:36.233411 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 06:01:36 crc kubenswrapper[4810]: I1203 06:01:36.234013 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 06:01:36 crc kubenswrapper[4810]: I1203 06:01:36.260277 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.008575 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.124929 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7523d2ba-5d53-4443-9e8b-324e113e394e","Type":"ContainerDied","Data":"48771b12ea7bb42cc0b6d2f230ad8e758bc8c2de76342c75b82d80cdd267eec1"} Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.124991 4810 scope.go:117] "RemoveContainer" containerID="9b70a84a8a48513db6b23572fc15b2d9a666fea14fa5c79d3c320eda9d3e346a" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.125434 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.156116 4810 scope.go:117] "RemoveContainer" containerID="3fa764d25d9f1d4ff141f1bfb4b712de9c77c3d27e68819c874f1eef9fa7f666" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.176196 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-combined-ca-bundle\") pod \"7523d2ba-5d53-4443-9e8b-324e113e394e\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.176365 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-run-httpd\") pod \"7523d2ba-5d53-4443-9e8b-324e113e394e\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.176435 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-log-httpd\") pod \"7523d2ba-5d53-4443-9e8b-324e113e394e\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.176457 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-sg-core-conf-yaml\") pod \"7523d2ba-5d53-4443-9e8b-324e113e394e\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.176476 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-config-data\") pod \"7523d2ba-5d53-4443-9e8b-324e113e394e\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.176506 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrgtv\" (UniqueName: 
\"kubernetes.io/projected/7523d2ba-5d53-4443-9e8b-324e113e394e-kube-api-access-xrgtv\") pod \"7523d2ba-5d53-4443-9e8b-324e113e394e\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.176570 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-scripts\") pod \"7523d2ba-5d53-4443-9e8b-324e113e394e\" (UID: \"7523d2ba-5d53-4443-9e8b-324e113e394e\") " Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.176944 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7523d2ba-5d53-4443-9e8b-324e113e394e" (UID: "7523d2ba-5d53-4443-9e8b-324e113e394e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.177037 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7523d2ba-5d53-4443-9e8b-324e113e394e" (UID: "7523d2ba-5d53-4443-9e8b-324e113e394e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.178230 4810 scope.go:117] "RemoveContainer" containerID="93ad88519e275c613ea6da79c9667265acda86c5c72984e44eecf458381af4ea" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.181836 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-scripts" (OuterVolumeSpecName: "scripts") pod "7523d2ba-5d53-4443-9e8b-324e113e394e" (UID: "7523d2ba-5d53-4443-9e8b-324e113e394e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.182888 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7523d2ba-5d53-4443-9e8b-324e113e394e-kube-api-access-xrgtv" (OuterVolumeSpecName: "kube-api-access-xrgtv") pod "7523d2ba-5d53-4443-9e8b-324e113e394e" (UID: "7523d2ba-5d53-4443-9e8b-324e113e394e"). InnerVolumeSpecName "kube-api-access-xrgtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.207262 4810 scope.go:117] "RemoveContainer" containerID="0b56eeb79f1071aebee109761f5b2622e40cf2a20f5f825557219b6b6c56e2fa" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.216488 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7523d2ba-5d53-4443-9e8b-324e113e394e" (UID: "7523d2ba-5d53-4443-9e8b-324e113e394e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.278776 4810 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.278815 4810 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7523d2ba-5d53-4443-9e8b-324e113e394e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.278825 4810 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.278835 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrgtv\" (UniqueName: \"kubernetes.io/projected/7523d2ba-5d53-4443-9e8b-324e113e394e-kube-api-access-xrgtv\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.278844 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.281827 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7523d2ba-5d53-4443-9e8b-324e113e394e" (UID: "7523d2ba-5d53-4443-9e8b-324e113e394e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.307611 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-config-data" (OuterVolumeSpecName: "config-data") pod "7523d2ba-5d53-4443-9e8b-324e113e394e" (UID: "7523d2ba-5d53-4443-9e8b-324e113e394e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.380175 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.380218 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7523d2ba-5d53-4443-9e8b-324e113e394e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.451709 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.460077 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.486333 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:38 crc kubenswrapper[4810]: E1203 06:01:38.486823 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="ceilometer-central-agent" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.486842 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="ceilometer-central-agent" Dec 03 06:01:38 crc kubenswrapper[4810]: E1203 06:01:38.486855 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="proxy-httpd" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.486862 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="proxy-httpd" Dec 03 06:01:38 crc kubenswrapper[4810]: E1203 06:01:38.486901 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="sg-core" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.486907 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="sg-core" Dec 03 06:01:38 crc kubenswrapper[4810]: E1203 06:01:38.486918 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="ceilometer-notification-agent" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.486924 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="ceilometer-notification-agent" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.487104 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="ceilometer-central-agent" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.487121 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="sg-core" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.487143 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="ceilometer-notification-agent" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.487149 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" containerName="proxy-httpd" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.488874 4810 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.491460 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.511997 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.534066 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.592495 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-config-data\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.592570 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-log-httpd\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.592598 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drxqc\" (UniqueName: \"kubernetes.io/projected/2923164b-620d-4496-bebb-3ce70a05beff-kube-api-access-drxqc\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.592635 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.594011 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-scripts\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.594223 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.594354 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-run-httpd\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.697652 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-config-data\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 
06:01:38.697824 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-log-httpd\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.697883 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drxqc\" (UniqueName: \"kubernetes.io/projected/2923164b-620d-4496-bebb-3ce70a05beff-kube-api-access-drxqc\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.697951 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.698044 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-scripts\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.698143 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.698201 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-run-httpd\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.698682 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-log-httpd\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.698821 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-run-httpd\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.703544 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.703653 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.704258 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-config-data\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.704478 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-scripts\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.725279 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drxqc\" (UniqueName: \"kubernetes.io/projected/2923164b-620d-4496-bebb-3ce70a05beff-kube-api-access-drxqc\") pod \"ceilometer-0\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " pod="openstack/ceilometer-0" Dec 03 06:01:38 crc kubenswrapper[4810]: I1203 06:01:38.833955 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:39 crc kubenswrapper[4810]: I1203 06:01:39.150601 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nnphx" event={"ID":"3445e1e0-f732-451f-bb47-ad7e6492dfa3","Type":"ContainerStarted","Data":"294f048c5ba1c6aa7fcee6ccd7db8fb0f95147b402c87e7441528633ae117b54"} Dec 03 06:01:39 crc kubenswrapper[4810]: I1203 06:01:39.184180 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-nnphx" podStartSLOduration=2.152009863 podStartE2EDuration="17.184158735s" podCreationTimestamp="2025-12-03 06:01:22 +0000 UTC" firstStartedPulling="2025-12-03 06:01:22.842814537 +0000 UTC m=+1206.778275378" lastFinishedPulling="2025-12-03 06:01:37.874963409 +0000 UTC m=+1221.810424250" observedRunningTime="2025-12-03 06:01:39.181466855 +0000 UTC m=+1223.116927696" watchObservedRunningTime="2025-12-03 06:01:39.184158735 +0000 UTC m=+1223.119619576" Dec 03 06:01:39 crc kubenswrapper[4810]: I1203 06:01:39.508314 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:40 crc kubenswrapper[4810]: I1203 06:01:40.178933 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerStarted","Data":"537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae"} Dec 03 06:01:40 crc kubenswrapper[4810]: I1203 06:01:40.179425 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerStarted","Data":"e27f24b51c1ddd2a62ec18e144a6fdc29367c3bf64be12c6243dbe1a84d6f59b"} Dec 03 06:01:40 crc kubenswrapper[4810]: I1203 06:01:40.388302 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7523d2ba-5d53-4443-9e8b-324e113e394e" path="/var/lib/kubelet/pods/7523d2ba-5d53-4443-9e8b-324e113e394e/volumes" Dec 03 06:01:41 crc kubenswrapper[4810]: I1203 06:01:41.191741 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerStarted","Data":"67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830"} Dec 03 06:01:41 crc kubenswrapper[4810]: I1203 06:01:41.427898 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:42 crc 
kubenswrapper[4810]: I1203 06:01:42.205503 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerStarted","Data":"3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba"} Dec 03 06:01:44 crc kubenswrapper[4810]: I1203 06:01:44.232262 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerStarted","Data":"b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b"} Dec 03 06:01:44 crc kubenswrapper[4810]: I1203 06:01:44.233128 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 06:01:44 crc kubenswrapper[4810]: I1203 06:01:44.232599 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="sg-core" containerID="cri-o://3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba" gracePeriod=30 Dec 03 06:01:44 crc kubenswrapper[4810]: I1203 06:01:44.232507 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="ceilometer-central-agent" containerID="cri-o://537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae" gracePeriod=30 Dec 03 06:01:44 crc kubenswrapper[4810]: I1203 06:01:44.232657 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="ceilometer-notification-agent" containerID="cri-o://67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830" gracePeriod=30 Dec 03 06:01:44 crc kubenswrapper[4810]: I1203 06:01:44.232656 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="proxy-httpd" containerID="cri-o://b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b" gracePeriod=30 Dec 03 06:01:44 crc kubenswrapper[4810]: I1203 06:01:44.283059 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.7184555230000003 podStartE2EDuration="6.283036049s" podCreationTimestamp="2025-12-03 06:01:38 +0000 UTC" firstStartedPulling="2025-12-03 06:01:39.525594432 +0000 UTC m=+1223.461055293" lastFinishedPulling="2025-12-03 06:01:43.090174958 +0000 UTC m=+1227.025635819" observedRunningTime="2025-12-03 06:01:44.267109532 +0000 UTC m=+1228.202570463" watchObservedRunningTime="2025-12-03 06:01:44.283036049 +0000 UTC m=+1228.218496890" Dec 03 06:01:45 crc kubenswrapper[4810]: I1203 06:01:45.260395 4810 generic.go:334] "Generic (PLEG): container finished" podID="2923164b-620d-4496-bebb-3ce70a05beff" containerID="b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b" exitCode=0 Dec 03 06:01:45 crc kubenswrapper[4810]: I1203 06:01:45.261036 4810 generic.go:334] "Generic (PLEG): container finished" podID="2923164b-620d-4496-bebb-3ce70a05beff" containerID="3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba" exitCode=2 Dec 03 06:01:45 crc kubenswrapper[4810]: I1203 06:01:45.261058 4810 generic.go:334] "Generic (PLEG): container finished" podID="2923164b-620d-4496-bebb-3ce70a05beff" containerID="67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830" exitCode=0 Dec 03 06:01:45 crc 
kubenswrapper[4810]: I1203 06:01:45.260468 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerDied","Data":"b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b"} Dec 03 06:01:45 crc kubenswrapper[4810]: I1203 06:01:45.261112 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerDied","Data":"3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba"} Dec 03 06:01:45 crc kubenswrapper[4810]: I1203 06:01:45.261134 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerDied","Data":"67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830"} Dec 03 06:01:50 crc kubenswrapper[4810]: I1203 06:01:50.315345 4810 generic.go:334] "Generic (PLEG): container finished" podID="3445e1e0-f732-451f-bb47-ad7e6492dfa3" containerID="294f048c5ba1c6aa7fcee6ccd7db8fb0f95147b402c87e7441528633ae117b54" exitCode=0 Dec 03 06:01:50 crc kubenswrapper[4810]: I1203 06:01:50.316145 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nnphx" event={"ID":"3445e1e0-f732-451f-bb47-ad7e6492dfa3","Type":"ContainerDied","Data":"294f048c5ba1c6aa7fcee6ccd7db8fb0f95147b402c87e7441528633ae117b54"} Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.722147 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.803577 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6w6j\" (UniqueName: \"kubernetes.io/projected/3445e1e0-f732-451f-bb47-ad7e6492dfa3-kube-api-access-g6w6j\") pod \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.803745 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-scripts\") pod \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.803813 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-config-data\") pod \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.803908 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-combined-ca-bundle\") pod \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\" (UID: \"3445e1e0-f732-451f-bb47-ad7e6492dfa3\") " Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.811352 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3445e1e0-f732-451f-bb47-ad7e6492dfa3-kube-api-access-g6w6j" (OuterVolumeSpecName: "kube-api-access-g6w6j") pod "3445e1e0-f732-451f-bb47-ad7e6492dfa3" (UID: "3445e1e0-f732-451f-bb47-ad7e6492dfa3"). InnerVolumeSpecName "kube-api-access-g6w6j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.812026 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-scripts" (OuterVolumeSpecName: "scripts") pod "3445e1e0-f732-451f-bb47-ad7e6492dfa3" (UID: "3445e1e0-f732-451f-bb47-ad7e6492dfa3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.838979 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-config-data" (OuterVolumeSpecName: "config-data") pod "3445e1e0-f732-451f-bb47-ad7e6492dfa3" (UID: "3445e1e0-f732-451f-bb47-ad7e6492dfa3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.839543 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3445e1e0-f732-451f-bb47-ad7e6492dfa3" (UID: "3445e1e0-f732-451f-bb47-ad7e6492dfa3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.906701 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.906779 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.906801 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3445e1e0-f732-451f-bb47-ad7e6492dfa3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:51 crc kubenswrapper[4810]: I1203 06:01:51.906823 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6w6j\" (UniqueName: \"kubernetes.io/projected/3445e1e0-f732-451f-bb47-ad7e6492dfa3-kube-api-access-g6w6j\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.381324 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nnphx" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.394146 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nnphx" event={"ID":"3445e1e0-f732-451f-bb47-ad7e6492dfa3","Type":"ContainerDied","Data":"0991d2834451bbc87574294d8d752f6dae184242955698d3e2f8bc97058da1af"} Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.394199 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0991d2834451bbc87574294d8d752f6dae184242955698d3e2f8bc97058da1af" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.537263 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 06:01:52 crc kubenswrapper[4810]: E1203 06:01:52.538834 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3445e1e0-f732-451f-bb47-ad7e6492dfa3" containerName="nova-cell0-conductor-db-sync" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.538860 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3445e1e0-f732-451f-bb47-ad7e6492dfa3" containerName="nova-cell0-conductor-db-sync" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.539251 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="3445e1e0-f732-451f-bb47-ad7e6492dfa3" containerName="nova-cell0-conductor-db-sync" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.540298 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.543044 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-4pg6x" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.543289 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.554608 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.623256 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fssc\" (UniqueName: \"kubernetes.io/projected/51f2670f-1de2-4383-a6a9-ea85a63a7586-kube-api-access-5fssc\") pod \"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.623377 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f2670f-1de2-4383-a6a9-ea85a63a7586-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.623452 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51f2670f-1de2-4383-a6a9-ea85a63a7586-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.725062 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f2670f-1de2-4383-a6a9-ea85a63a7586-combined-ca-bundle\") pod 
\"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.725145 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51f2670f-1de2-4383-a6a9-ea85a63a7586-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.725316 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fssc\" (UniqueName: \"kubernetes.io/projected/51f2670f-1de2-4383-a6a9-ea85a63a7586-kube-api-access-5fssc\") pod \"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.733083 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f2670f-1de2-4383-a6a9-ea85a63a7586-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.733165 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51f2670f-1de2-4383-a6a9-ea85a63a7586-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.749239 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fssc\" (UniqueName: \"kubernetes.io/projected/51f2670f-1de2-4383-a6a9-ea85a63a7586-kube-api-access-5fssc\") pod \"nova-cell0-conductor-0\" (UID: \"51f2670f-1de2-4383-a6a9-ea85a63a7586\") " pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.860675 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.871498 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.929699 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-run-httpd\") pod \"2923164b-620d-4496-bebb-3ce70a05beff\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.929792 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-combined-ca-bundle\") pod \"2923164b-620d-4496-bebb-3ce70a05beff\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.930033 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-sg-core-conf-yaml\") pod \"2923164b-620d-4496-bebb-3ce70a05beff\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.930065 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-log-httpd\") pod \"2923164b-620d-4496-bebb-3ce70a05beff\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.930114 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-config-data\") pod \"2923164b-620d-4496-bebb-3ce70a05beff\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.930161 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drxqc\" (UniqueName: \"kubernetes.io/projected/2923164b-620d-4496-bebb-3ce70a05beff-kube-api-access-drxqc\") pod \"2923164b-620d-4496-bebb-3ce70a05beff\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.930189 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-scripts\") pod \"2923164b-620d-4496-bebb-3ce70a05beff\" (UID: \"2923164b-620d-4496-bebb-3ce70a05beff\") " Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.931310 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2923164b-620d-4496-bebb-3ce70a05beff" (UID: "2923164b-620d-4496-bebb-3ce70a05beff"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.931936 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2923164b-620d-4496-bebb-3ce70a05beff" (UID: "2923164b-620d-4496-bebb-3ce70a05beff"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.942942 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-scripts" (OuterVolumeSpecName: "scripts") pod "2923164b-620d-4496-bebb-3ce70a05beff" (UID: "2923164b-620d-4496-bebb-3ce70a05beff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.943001 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2923164b-620d-4496-bebb-3ce70a05beff-kube-api-access-drxqc" (OuterVolumeSpecName: "kube-api-access-drxqc") pod "2923164b-620d-4496-bebb-3ce70a05beff" (UID: "2923164b-620d-4496-bebb-3ce70a05beff"). InnerVolumeSpecName "kube-api-access-drxqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:01:52 crc kubenswrapper[4810]: I1203 06:01:52.961328 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2923164b-620d-4496-bebb-3ce70a05beff" (UID: "2923164b-620d-4496-bebb-3ce70a05beff"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.033528 4810 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.033564 4810 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.033575 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drxqc\" (UniqueName: \"kubernetes.io/projected/2923164b-620d-4496-bebb-3ce70a05beff-kube-api-access-drxqc\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.033589 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.033598 4810 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2923164b-620d-4496-bebb-3ce70a05beff-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.059961 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2923164b-620d-4496-bebb-3ce70a05beff" (UID: "2923164b-620d-4496-bebb-3ce70a05beff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.081724 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-config-data" (OuterVolumeSpecName: "config-data") pod "2923164b-620d-4496-bebb-3ce70a05beff" (UID: "2923164b-620d-4496-bebb-3ce70a05beff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.135157 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.135201 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2923164b-620d-4496-bebb-3ce70a05beff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.337885 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 06:01:53 crc kubenswrapper[4810]: W1203 06:01:53.344808 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51f2670f_1de2_4383_a6a9_ea85a63a7586.slice/crio-dd627de0609e89eae0d844f82f15745c20fe3fd215a83489b66fb8bf1c29a512 WatchSource:0}: Error finding container dd627de0609e89eae0d844f82f15745c20fe3fd215a83489b66fb8bf1c29a512: Status 404 returned error can't find the container with id dd627de0609e89eae0d844f82f15745c20fe3fd215a83489b66fb8bf1c29a512 Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.402574 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"51f2670f-1de2-4383-a6a9-ea85a63a7586","Type":"ContainerStarted","Data":"dd627de0609e89eae0d844f82f15745c20fe3fd215a83489b66fb8bf1c29a512"} Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.406423 4810 generic.go:334] "Generic (PLEG): container finished" podID="2923164b-620d-4496-bebb-3ce70a05beff" containerID="537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae" exitCode=0 Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.406487 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.406512 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerDied","Data":"537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae"} Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.407105 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2923164b-620d-4496-bebb-3ce70a05beff","Type":"ContainerDied","Data":"e27f24b51c1ddd2a62ec18e144a6fdc29367c3bf64be12c6243dbe1a84d6f59b"} Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.407169 4810 scope.go:117] "RemoveContainer" containerID="b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.439410 4810 scope.go:117] "RemoveContainer" containerID="3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.479879 4810 scope.go:117] "RemoveContainer" containerID="67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.491901 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.513220 4810 scope.go:117] "RemoveContainer" containerID="537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.523603 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.529515 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:53 crc kubenswrapper[4810]: E1203 06:01:53.529996 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="ceilometer-central-agent" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.530017 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="ceilometer-central-agent" Dec 03 06:01:53 crc kubenswrapper[4810]: E1203 06:01:53.530029 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="proxy-httpd" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.530036 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="proxy-httpd" Dec 03 06:01:53 crc kubenswrapper[4810]: E1203 06:01:53.530073 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="sg-core" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.530082 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="sg-core" Dec 03 06:01:53 crc kubenswrapper[4810]: E1203 06:01:53.530094 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="ceilometer-notification-agent" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.530100 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="ceilometer-notification-agent" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.530277 4810 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="proxy-httpd" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.530296 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="sg-core" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.530305 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="ceilometer-notification-agent" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.530324 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2923164b-620d-4496-bebb-3ce70a05beff" containerName="ceilometer-central-agent" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.532028 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.536142 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.537361 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.541977 4810 scope.go:117] "RemoveContainer" containerID="b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b" Dec 03 06:01:53 crc kubenswrapper[4810]: E1203 06:01:53.542474 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b\": container with ID starting with b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b not found: ID does not exist" containerID="b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.542597 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b"} err="failed to get container status \"b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b\": rpc error: code = NotFound desc = could not find container \"b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b\": container with ID starting with b7dcf84dedfa8d28bf348d278f84a9dd43673450965a9de6ae7b77307f3b516b not found: ID does not exist" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.542688 4810 scope.go:117] "RemoveContainer" containerID="3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba" Dec 03 06:01:53 crc kubenswrapper[4810]: E1203 06:01:53.543290 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba\": container with ID starting with 3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba not found: ID does not exist" containerID="3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.543467 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba"} err="failed to get container status \"3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba\": rpc error: code = NotFound desc = could not find container \"3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba\": container 
with ID starting with 3ec22758519f0d07cde498f1c3d9fe99809251d12b06479ca2c566bf047e05ba not found: ID does not exist" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.544344 4810 scope.go:117] "RemoveContainer" containerID="67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830" Dec 03 06:01:53 crc kubenswrapper[4810]: E1203 06:01:53.544703 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830\": container with ID starting with 67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830 not found: ID does not exist" containerID="67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.544746 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830"} err="failed to get container status \"67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830\": rpc error: code = NotFound desc = could not find container \"67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830\": container with ID starting with 67bac33ed8d874ccd21cc6ed883ebb6235da383d6ff0003c63abf8e379b47830 not found: ID does not exist" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.544771 4810 scope.go:117] "RemoveContainer" containerID="537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.547994 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:53 crc kubenswrapper[4810]: E1203 06:01:53.548136 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae\": container with ID starting with 537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae not found: ID does not exist" containerID="537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.548219 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae"} err="failed to get container status \"537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae\": rpc error: code = NotFound desc = could not find container \"537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae\": container with ID starting with 537e7e0e8215869024a64a6b0e1c823fac9a4ca685deeb9e37440e943cfb7dae not found: ID does not exist" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.643648 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-log-httpd\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.643709 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.643749 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-run-httpd\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.643933 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-scripts\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.643987 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpktp\" (UniqueName: \"kubernetes.io/projected/52d3a8ca-61ea-46db-ace8-e7ff254da141-kube-api-access-rpktp\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.644036 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.644324 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-config-data\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.745845 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpktp\" (UniqueName: \"kubernetes.io/projected/52d3a8ca-61ea-46db-ace8-e7ff254da141-kube-api-access-rpktp\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.746062 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.746100 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-config-data\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.746126 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-log-httpd\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.746146 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " 
pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.746162 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-run-httpd\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.746223 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-scripts\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.747841 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-log-httpd\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.747927 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-run-httpd\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.752172 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-config-data\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.752227 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.753181 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-scripts\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.753169 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.764444 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpktp\" (UniqueName: \"kubernetes.io/projected/52d3a8ca-61ea-46db-ace8-e7ff254da141-kube-api-access-rpktp\") pod \"ceilometer-0\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " pod="openstack/ceilometer-0" Dec 03 06:01:53 crc kubenswrapper[4810]: I1203 06:01:53.852888 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:01:54 crc kubenswrapper[4810]: I1203 06:01:54.338214 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:54 crc kubenswrapper[4810]: I1203 06:01:54.392189 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2923164b-620d-4496-bebb-3ce70a05beff" path="/var/lib/kubelet/pods/2923164b-620d-4496-bebb-3ce70a05beff/volumes" Dec 03 06:01:54 crc kubenswrapper[4810]: I1203 06:01:54.421049 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"51f2670f-1de2-4383-a6a9-ea85a63a7586","Type":"ContainerStarted","Data":"e7c95a0492f93ae61dd93185387222bbef3dd7cc47567c0fb8eb82ceaab2ab73"} Dec 03 06:01:54 crc kubenswrapper[4810]: I1203 06:01:54.424376 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 03 06:01:54 crc kubenswrapper[4810]: I1203 06:01:54.435580 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerStarted","Data":"7c930d3d69abec9a38fe08efed91eed18dae89532df1d32c78e2f1c93dcce528"} Dec 03 06:01:54 crc kubenswrapper[4810]: I1203 06:01:54.445552 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.445528549 podStartE2EDuration="2.445528549s" podCreationTimestamp="2025-12-03 06:01:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:01:54.443803824 +0000 UTC m=+1238.379264665" watchObservedRunningTime="2025-12-03 06:01:54.445528549 +0000 UTC m=+1238.380989390" Dec 03 06:01:54 crc kubenswrapper[4810]: I1203 06:01:54.743473 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:01:55 crc kubenswrapper[4810]: I1203 06:01:55.450061 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerStarted","Data":"48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7"} Dec 03 06:01:55 crc kubenswrapper[4810]: I1203 06:01:55.450656 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerStarted","Data":"16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1"} Dec 03 06:01:56 crc kubenswrapper[4810]: I1203 06:01:56.464654 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerStarted","Data":"bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5"} Dec 03 06:01:58 crc kubenswrapper[4810]: I1203 06:01:58.500582 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerStarted","Data":"ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d"} Dec 03 06:01:58 crc kubenswrapper[4810]: I1203 06:01:58.500952 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="ceilometer-central-agent" containerID="cri-o://16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1" gracePeriod=30 Dec 03 06:01:58 crc kubenswrapper[4810]: I1203 
06:01:58.501060 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="proxy-httpd" containerID="cri-o://ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d" gracePeriod=30 Dec 03 06:01:58 crc kubenswrapper[4810]: I1203 06:01:58.501086 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="sg-core" containerID="cri-o://bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5" gracePeriod=30 Dec 03 06:01:58 crc kubenswrapper[4810]: I1203 06:01:58.501111 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="ceilometer-notification-agent" containerID="cri-o://48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7" gracePeriod=30 Dec 03 06:01:58 crc kubenswrapper[4810]: I1203 06:01:58.502512 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 06:01:58 crc kubenswrapper[4810]: I1203 06:01:58.543951 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.345317108 podStartE2EDuration="5.54389704s" podCreationTimestamp="2025-12-03 06:01:53 +0000 UTC" firstStartedPulling="2025-12-03 06:01:54.34828165 +0000 UTC m=+1238.283742491" lastFinishedPulling="2025-12-03 06:01:57.546861582 +0000 UTC m=+1241.482322423" observedRunningTime="2025-12-03 06:01:58.529653165 +0000 UTC m=+1242.465114006" watchObservedRunningTime="2025-12-03 06:01:58.54389704 +0000 UTC m=+1242.479357881" Dec 03 06:01:59 crc kubenswrapper[4810]: I1203 06:01:59.523958 4810 generic.go:334] "Generic (PLEG): container finished" podID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerID="ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d" exitCode=0 Dec 03 06:01:59 crc kubenswrapper[4810]: I1203 06:01:59.524615 4810 generic.go:334] "Generic (PLEG): container finished" podID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerID="bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5" exitCode=2 Dec 03 06:01:59 crc kubenswrapper[4810]: I1203 06:01:59.524095 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerDied","Data":"ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d"} Dec 03 06:01:59 crc kubenswrapper[4810]: I1203 06:01:59.524700 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerDied","Data":"bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5"} Dec 03 06:01:59 crc kubenswrapper[4810]: I1203 06:01:59.524766 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerDied","Data":"48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7"} Dec 03 06:01:59 crc kubenswrapper[4810]: I1203 06:01:59.524642 4810 generic.go:334] "Generic (PLEG): container finished" podID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerID="48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7" exitCode=0 Dec 03 06:02:02 crc kubenswrapper[4810]: I1203 06:02:02.910949 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/nova-cell0-conductor-0" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.623217 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-cl2k9"] Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.625814 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.630618 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.630895 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.648817 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cl2k9"] Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.690250 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbbmd\" (UniqueName: \"kubernetes.io/projected/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-kube-api-access-hbbmd\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.690723 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.690858 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-config-data\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.690994 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-scripts\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.793018 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.793367 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-config-data\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.794556 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-scripts\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.794720 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbbmd\" (UniqueName: \"kubernetes.io/projected/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-kube-api-access-hbbmd\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.801135 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-scripts\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.806214 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.817456 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-config-data\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.825368 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbbmd\" (UniqueName: \"kubernetes.io/projected/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-kube-api-access-hbbmd\") pod \"nova-cell0-cell-mapping-cl2k9\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.870658 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.872571 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.878903 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.898071 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14aef582-d59c-4621-9c63-580006ddb5a5-logs\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.898177 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-config-data\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.898247 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7l29\" (UniqueName: \"kubernetes.io/projected/14aef582-d59c-4621-9c63-580006ddb5a5-kube-api-access-c7l29\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.898281 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.908815 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.920552 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.922456 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.928669 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.955350 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:03 crc kubenswrapper[4810]: I1203 06:02:03.964817 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.000444 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14aef582-d59c-4621-9c63-580006ddb5a5-logs\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.000989 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.001015 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-config-data\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.001072 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-config-data\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.001103 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7l29\" (UniqueName: \"kubernetes.io/projected/14aef582-d59c-4621-9c63-580006ddb5a5-kube-api-access-c7l29\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.001134 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e25ca83c-6778-4305-9439-0982c5271277-logs\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.001157 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.001194 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/e25ca83c-6778-4305-9439-0982c5271277-kube-api-access-t9zpz\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.001762 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14aef582-d59c-4621-9c63-580006ddb5a5-logs\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.019403 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.035391 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-config-data\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.054269 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7l29\" (UniqueName: \"kubernetes.io/projected/14aef582-d59c-4621-9c63-580006ddb5a5-kube-api-access-c7l29\") pod \"nova-api-0\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.067434 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.069025 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.077171 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.098638 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.103808 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxnkc\" (UniqueName: \"kubernetes.io/projected/550d872e-2f7f-4f79-8a54-69360fcad3ab-kube-api-access-zxnkc\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.103878 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e25ca83c-6778-4305-9439-0982c5271277-logs\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.103930 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/e25ca83c-6778-4305-9439-0982c5271277-kube-api-access-t9zpz\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.103964 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-config-data\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.104014 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 
06:02:04.104047 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.104100 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-config-data\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.105054 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e25ca83c-6778-4305-9439-0982c5271277-logs\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.110419 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.127890 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74cfbb9557-lp28d"] Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.146368 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-config-data\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.178042 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/e25ca83c-6778-4305-9439-0982c5271277-kube-api-access-t9zpz\") pod \"nova-metadata-0\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.179925 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210334 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-sb\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210404 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-config\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210437 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210475 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd8ds\" (UniqueName: \"kubernetes.io/projected/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-kube-api-access-zd8ds\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210518 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-svc\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210578 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxnkc\" (UniqueName: \"kubernetes.io/projected/550d872e-2f7f-4f79-8a54-69360fcad3ab-kube-api-access-zxnkc\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210611 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-swift-storage-0\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210694 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-config-data\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.210718 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-nb\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: 
\"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.217089 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74cfbb9557-lp28d"] Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.224573 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-config-data\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.228643 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.242848 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.256745 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.261552 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxnkc\" (UniqueName: \"kubernetes.io/projected/550d872e-2f7f-4f79-8a54-69360fcad3ab-kube-api-access-zxnkc\") pod \"nova-scheduler-0\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.325026 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-sb\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.325115 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-config\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.325180 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd8ds\" (UniqueName: \"kubernetes.io/projected/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-kube-api-access-zd8ds\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.325242 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-svc\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.325324 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-swift-storage-0\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: 
\"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.325423 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-nb\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.327301 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-sb\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.327498 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-svc\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.327946 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-config\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.328092 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-swift-storage-0\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.328632 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-nb\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.340518 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.342130 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.358800 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.373505 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd8ds\" (UniqueName: \"kubernetes.io/projected/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-kube-api-access-zd8ds\") pod \"dnsmasq-dns-74cfbb9557-lp28d\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.482504 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.532350 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.544245 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.544336 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.544361 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgvzc\" (UniqueName: \"kubernetes.io/projected/1bde4997-caf1-41d4-8f41-7555a94311b2-kube-api-access-wgvzc\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.579890 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.646257 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.646334 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.646358 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgvzc\" (UniqueName: \"kubernetes.io/projected/1bde4997-caf1-41d4-8f41-7555a94311b2-kube-api-access-wgvzc\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.653983 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.661134 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.679226 
4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgvzc\" (UniqueName: \"kubernetes.io/projected/1bde4997-caf1-41d4-8f41-7555a94311b2-kube-api-access-wgvzc\") pod \"nova-cell1-novncproxy-0\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.718329 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.727940 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cl2k9"] Dec 03 06:02:04 crc kubenswrapper[4810]: I1203 06:02:04.930562 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.058161 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.382145 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:05 crc kubenswrapper[4810]: W1203 06:02:05.384304 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e6ce4c8_496c_49bc_a0b0_d4ac129f3314.slice/crio-2514345e6d86e9e562e76909f0c8d8ac915fd44ffa70c8daec41a7263c4d8d80 WatchSource:0}: Error finding container 2514345e6d86e9e562e76909f0c8d8ac915fd44ffa70c8daec41a7263c4d8d80: Status 404 returned error can't find the container with id 2514345e6d86e9e562e76909f0c8d8ac915fd44ffa70c8daec41a7263c4d8d80 Dec 03 06:02:05 crc kubenswrapper[4810]: W1203 06:02:05.389092 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550d872e_2f7f_4f79_8a54_69360fcad3ab.slice/crio-50bbce0bc33a89af2b2ee46531b9e748d6d92e9c25506f7bd247b9c1abe34aaa WatchSource:0}: Error finding container 50bbce0bc33a89af2b2ee46531b9e748d6d92e9c25506f7bd247b9c1abe34aaa: Status 404 returned error can't find the container with id 50bbce0bc33a89af2b2ee46531b9e748d6d92e9c25506f7bd247b9c1abe34aaa Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.391856 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74cfbb9557-lp28d"] Dec 03 06:02:05 crc kubenswrapper[4810]: W1203 06:02:05.392457 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bde4997_caf1_41d4_8f41_7555a94311b2.slice/crio-3fc5c79d2ae82d800b4d05fa12b42a34ec4d34e4580c4fae4883cd46b29d77b1 WatchSource:0}: Error finding container 3fc5c79d2ae82d800b4d05fa12b42a34ec4d34e4580c4fae4883cd46b29d77b1: Status 404 returned error can't find the container with id 3fc5c79d2ae82d800b4d05fa12b42a34ec4d34e4580c4fae4883cd46b29d77b1 Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.406546 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.700548 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7ts2b"] Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.702793 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.705869 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.707096 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.723220 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7ts2b"] Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.726483 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e25ca83c-6778-4305-9439-0982c5271277","Type":"ContainerStarted","Data":"1053b3b4dc7be5917e5f40132e75635850c4fb3d8d067f4ef8e155d566032fdd"} Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.729281 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14aef582-d59c-4621-9c63-580006ddb5a5","Type":"ContainerStarted","Data":"373381f07d955bc86144915095988de80fe0ee0be7b1daf783ac299c9e7512be"} Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.734174 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" event={"ID":"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314","Type":"ContainerStarted","Data":"2514345e6d86e9e562e76909f0c8d8ac915fd44ffa70c8daec41a7263c4d8d80"} Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.745918 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1bde4997-caf1-41d4-8f41-7555a94311b2","Type":"ContainerStarted","Data":"3fc5c79d2ae82d800b4d05fa12b42a34ec4d34e4580c4fae4883cd46b29d77b1"} Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.756226 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"550d872e-2f7f-4f79-8a54-69360fcad3ab","Type":"ContainerStarted","Data":"50bbce0bc33a89af2b2ee46531b9e748d6d92e9c25506f7bd247b9c1abe34aaa"} Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.770446 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cl2k9" event={"ID":"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed","Type":"ContainerStarted","Data":"efe26805e0c97ed6a5ae468813450dd7baed46a0fda72b25fdcdabe45537f2e7"} Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.770526 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cl2k9" event={"ID":"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed","Type":"ContainerStarted","Data":"52af320b4332f0384fec22ef5a05a52d5763d3dd2620307ec4c598db5442f146"} Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.804190 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-cl2k9" podStartSLOduration=2.804157558 podStartE2EDuration="2.804157558s" podCreationTimestamp="2025-12-03 06:02:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:05.790596921 +0000 UTC m=+1249.726057782" watchObservedRunningTime="2025-12-03 06:02:05.804157558 +0000 UTC m=+1249.739618399" Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.897894 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-config-data\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.898140 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.901413 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl8sz\" (UniqueName: \"kubernetes.io/projected/dd75096a-f05e-412e-a147-cba8eb1474b9-kube-api-access-zl8sz\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:05 crc kubenswrapper[4810]: I1203 06:02:05.901658 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-scripts\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.004685 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.004790 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl8sz\" (UniqueName: \"kubernetes.io/projected/dd75096a-f05e-412e-a147-cba8eb1474b9-kube-api-access-zl8sz\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.004860 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-scripts\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.004911 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-config-data\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.023657 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.024240 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-config-data\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.028547 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl8sz\" (UniqueName: \"kubernetes.io/projected/dd75096a-f05e-412e-a147-cba8eb1474b9-kube-api-access-zl8sz\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.032844 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-scripts\") pod \"nova-cell1-conductor-db-sync-7ts2b\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.039281 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.244580 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.309885 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-config-data\") pod \"52d3a8ca-61ea-46db-ace8-e7ff254da141\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.310593 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-run-httpd\") pod \"52d3a8ca-61ea-46db-ace8-e7ff254da141\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.310625 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-sg-core-conf-yaml\") pod \"52d3a8ca-61ea-46db-ace8-e7ff254da141\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.310702 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpktp\" (UniqueName: \"kubernetes.io/projected/52d3a8ca-61ea-46db-ace8-e7ff254da141-kube-api-access-rpktp\") pod \"52d3a8ca-61ea-46db-ace8-e7ff254da141\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.310796 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-scripts\") pod \"52d3a8ca-61ea-46db-ace8-e7ff254da141\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.310866 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-log-httpd\") pod \"52d3a8ca-61ea-46db-ace8-e7ff254da141\" (UID: 
\"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.311139 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-combined-ca-bundle\") pod \"52d3a8ca-61ea-46db-ace8-e7ff254da141\" (UID: \"52d3a8ca-61ea-46db-ace8-e7ff254da141\") " Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.312177 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "52d3a8ca-61ea-46db-ace8-e7ff254da141" (UID: "52d3a8ca-61ea-46db-ace8-e7ff254da141"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.314363 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "52d3a8ca-61ea-46db-ace8-e7ff254da141" (UID: "52d3a8ca-61ea-46db-ace8-e7ff254da141"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.322356 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d3a8ca-61ea-46db-ace8-e7ff254da141-kube-api-access-rpktp" (OuterVolumeSpecName: "kube-api-access-rpktp") pod "52d3a8ca-61ea-46db-ace8-e7ff254da141" (UID: "52d3a8ca-61ea-46db-ace8-e7ff254da141"). InnerVolumeSpecName "kube-api-access-rpktp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.325908 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-scripts" (OuterVolumeSpecName: "scripts") pod "52d3a8ca-61ea-46db-ace8-e7ff254da141" (UID: "52d3a8ca-61ea-46db-ace8-e7ff254da141"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.368023 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "52d3a8ca-61ea-46db-ace8-e7ff254da141" (UID: "52d3a8ca-61ea-46db-ace8-e7ff254da141"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.414037 4810 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.414077 4810 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.414090 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpktp\" (UniqueName: \"kubernetes.io/projected/52d3a8ca-61ea-46db-ace8-e7ff254da141-kube-api-access-rpktp\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.414099 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.414110 4810 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52d3a8ca-61ea-46db-ace8-e7ff254da141-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.461759 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-config-data" (OuterVolumeSpecName: "config-data") pod "52d3a8ca-61ea-46db-ace8-e7ff254da141" (UID: "52d3a8ca-61ea-46db-ace8-e7ff254da141"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.500906 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52d3a8ca-61ea-46db-ace8-e7ff254da141" (UID: "52d3a8ca-61ea-46db-ace8-e7ff254da141"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.521146 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.521199 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d3a8ca-61ea-46db-ace8-e7ff254da141-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.572292 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7ts2b"] Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.785597 4810 generic.go:334] "Generic (PLEG): container finished" podID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerID="ecf6347a1ecd086e444627a27ad6f527d6afca2b7b3b72e7ca28d15c7aacc3e0" exitCode=0 Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.785689 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" event={"ID":"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314","Type":"ContainerDied","Data":"ecf6347a1ecd086e444627a27ad6f527d6afca2b7b3b72e7ca28d15c7aacc3e0"} Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.799757 4810 generic.go:334] "Generic (PLEG): container finished" podID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerID="16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1" exitCode=0 Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.800556 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerDied","Data":"16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1"} Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.800624 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"52d3a8ca-61ea-46db-ace8-e7ff254da141","Type":"ContainerDied","Data":"7c930d3d69abec9a38fe08efed91eed18dae89532df1d32c78e2f1c93dcce528"} Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.800648 4810 scope.go:117] "RemoveContainer" containerID="ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.800670 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.864516 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.876382 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.897632 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:06 crc kubenswrapper[4810]: E1203 06:02:06.906005 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="ceilometer-notification-agent" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.906045 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="ceilometer-notification-agent" Dec 03 06:02:06 crc kubenswrapper[4810]: E1203 06:02:06.906076 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="sg-core" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.906085 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="sg-core" Dec 03 06:02:06 crc kubenswrapper[4810]: E1203 06:02:06.906108 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="ceilometer-central-agent" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.906120 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="ceilometer-central-agent" Dec 03 06:02:06 crc kubenswrapper[4810]: E1203 06:02:06.906129 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="proxy-httpd" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.906135 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="proxy-httpd" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.906317 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="ceilometer-notification-agent" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.906337 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="sg-core" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.906352 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="ceilometer-central-agent" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.906370 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" containerName="proxy-httpd" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.908379 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.916435 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.916755 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 06:02:06 crc kubenswrapper[4810]: I1203 06:02:06.918115 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.033888 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrr69\" (UniqueName: \"kubernetes.io/projected/204e9c97-7971-463d-8674-3f2cf51a4a97-kube-api-access-mrr69\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.035410 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.035567 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-scripts\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.035675 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-run-httpd\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.035771 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-log-httpd\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.035868 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-config-data\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.035990 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.138499 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrr69\" (UniqueName: \"kubernetes.io/projected/204e9c97-7971-463d-8674-3f2cf51a4a97-kube-api-access-mrr69\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: 
I1203 06:02:07.138568 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.138604 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-scripts\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.138637 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-run-httpd\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.138657 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-log-httpd\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.138681 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-config-data\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.138742 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.140282 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-log-httpd\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.140301 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-run-httpd\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.146046 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.148326 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.149392 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-scripts\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.154224 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-config-data\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.159672 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrr69\" (UniqueName: \"kubernetes.io/projected/204e9c97-7971-463d-8674-3f2cf51a4a97-kube-api-access-mrr69\") pod \"ceilometer-0\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.252194 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.695951 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.737212 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.818479 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" event={"ID":"dd75096a-f05e-412e-a147-cba8eb1474b9","Type":"ContainerStarted","Data":"a56974ed678da2c507d4eeabd0bb845eb41d372be64574fb4492b2c46ef27543"} Dec 03 06:02:07 crc kubenswrapper[4810]: I1203 06:02:07.982349 4810 scope.go:117] "RemoveContainer" containerID="bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.394133 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52d3a8ca-61ea-46db-ace8-e7ff254da141" path="/var/lib/kubelet/pods/52d3a8ca-61ea-46db-ace8-e7ff254da141/volumes" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.532221 4810 scope.go:117] "RemoveContainer" containerID="48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.778279 4810 scope.go:117] "RemoveContainer" containerID="16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.839595 4810 scope.go:117] "RemoveContainer" containerID="ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d" Dec 03 06:02:08 crc kubenswrapper[4810]: E1203 06:02:08.840261 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d\": container with ID starting with ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d not found: ID does not exist" containerID="ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.840362 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d"} err="failed to get container status \"ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d\": rpc error: code = NotFound desc = could not find 
container \"ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d\": container with ID starting with ea86d2b7152515b42987ff8201f6e1bd3875fb04519e09e6a6d2bcd6994e4a7d not found: ID does not exist" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.840474 4810 scope.go:117] "RemoveContainer" containerID="bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5" Dec 03 06:02:08 crc kubenswrapper[4810]: E1203 06:02:08.840792 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5\": container with ID starting with bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5 not found: ID does not exist" containerID="bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.840885 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5"} err="failed to get container status \"bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5\": rpc error: code = NotFound desc = could not find container \"bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5\": container with ID starting with bb8a03f6e95ad94d8365734803b1d75319d85ef235942dd360066f06d27629a5 not found: ID does not exist" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.840966 4810 scope.go:117] "RemoveContainer" containerID="48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7" Dec 03 06:02:08 crc kubenswrapper[4810]: E1203 06:02:08.841322 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7\": container with ID starting with 48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7 not found: ID does not exist" containerID="48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.841425 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7"} err="failed to get container status \"48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7\": rpc error: code = NotFound desc = could not find container \"48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7\": container with ID starting with 48c5724d5b77ef6777e72cbcaf33325d7b5f5c2f909c53ca364478af4f18ada7 not found: ID does not exist" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.841535 4810 scope.go:117] "RemoveContainer" containerID="16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1" Dec 03 06:02:08 crc kubenswrapper[4810]: E1203 06:02:08.841947 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1\": container with ID starting with 16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1 not found: ID does not exist" containerID="16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1" Dec 03 06:02:08 crc kubenswrapper[4810]: I1203 06:02:08.842092 4810 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1"} err="failed to get container status \"16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1\": rpc error: code = NotFound desc = could not find container \"16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1\": container with ID starting with 16096be9e1007f6856afb49939b16549e1f7f30ed0bb0b2204932ae3e2d51ad1 not found: ID does not exist" Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.063639 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:09 crc kubenswrapper[4810]: W1203 06:02:09.074887 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod204e9c97_7971_463d_8674_3f2cf51a4a97.slice/crio-464ffe44d3ed9841e552f8cf9766d94208dc7b6681dc66fa118fc0e6d79e9876 WatchSource:0}: Error finding container 464ffe44d3ed9841e552f8cf9766d94208dc7b6681dc66fa118fc0e6d79e9876: Status 404 returned error can't find the container with id 464ffe44d3ed9841e552f8cf9766d94208dc7b6681dc66fa118fc0e6d79e9876 Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.880903 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" event={"ID":"dd75096a-f05e-412e-a147-cba8eb1474b9","Type":"ContainerStarted","Data":"51ec9842055502fc42451726abce9f64b172d32ae02e7a3e8f4ac3ce41b512e6"} Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.888598 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14aef582-d59c-4621-9c63-580006ddb5a5","Type":"ContainerStarted","Data":"810d402b7ecf1eb68c1e5927b914711e98e7f7c02784d742d35745f028b9b062"} Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.888663 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14aef582-d59c-4621-9c63-580006ddb5a5","Type":"ContainerStarted","Data":"45139548c56abbff54c9467ff91e3ce8d73e1b9df81fc4e2c151559ff1fde590"} Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.894242 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" event={"ID":"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314","Type":"ContainerStarted","Data":"08d78a127662ee8894f99424aa6413dc00b552a6e0b6ceb6e9760a083026c269"} Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.896290 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.904404 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1bde4997-caf1-41d4-8f41-7555a94311b2","Type":"ContainerStarted","Data":"920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067"} Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.904539 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="1bde4997-caf1-41d4-8f41-7555a94311b2" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067" gracePeriod=30 Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.920884 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" podStartSLOduration=4.920865202 podStartE2EDuration="4.920865202s" podCreationTimestamp="2025-12-03 06:02:05 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:09.912233145 +0000 UTC m=+1253.847693996" watchObservedRunningTime="2025-12-03 06:02:09.920865202 +0000 UTC m=+1253.856326043" Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.932711 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"550d872e-2f7f-4f79-8a54-69360fcad3ab","Type":"ContainerStarted","Data":"333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c"} Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.950578 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.759124939 podStartE2EDuration="5.950553903s" podCreationTimestamp="2025-12-03 06:02:04 +0000 UTC" firstStartedPulling="2025-12-03 06:02:05.397222409 +0000 UTC m=+1249.332683250" lastFinishedPulling="2025-12-03 06:02:08.588651363 +0000 UTC m=+1252.524112214" observedRunningTime="2025-12-03 06:02:09.943821236 +0000 UTC m=+1253.879282087" watchObservedRunningTime="2025-12-03 06:02:09.950553903 +0000 UTC m=+1253.886014744" Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.980391 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" podStartSLOduration=5.980366257 podStartE2EDuration="5.980366257s" podCreationTimestamp="2025-12-03 06:02:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:09.967685954 +0000 UTC m=+1253.903146795" watchObservedRunningTime="2025-12-03 06:02:09.980366257 +0000 UTC m=+1253.915827099" Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.986833 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerStarted","Data":"bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c"} Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.986891 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerStarted","Data":"464ffe44d3ed9841e552f8cf9766d94208dc7b6681dc66fa118fc0e6d79e9876"} Dec 03 06:02:09 crc kubenswrapper[4810]: I1203 06:02:09.996877 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.492759979 podStartE2EDuration="6.996832141s" podCreationTimestamp="2025-12-03 06:02:03 +0000 UTC" firstStartedPulling="2025-12-03 06:02:05.044498847 +0000 UTC m=+1248.979959688" lastFinishedPulling="2025-12-03 06:02:08.548571009 +0000 UTC m=+1252.484031850" observedRunningTime="2025-12-03 06:02:09.988253835 +0000 UTC m=+1253.923714686" watchObservedRunningTime="2025-12-03 06:02:09.996832141 +0000 UTC m=+1253.932292982" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.008910 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e25ca83c-6778-4305-9439-0982c5271277","Type":"ContainerStarted","Data":"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3"} Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.009086 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e25ca83c-6778-4305-9439-0982c5271277" containerName="nova-metadata-log" 
containerID="cri-o://587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487" gracePeriod=30 Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.009304 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e25ca83c-6778-4305-9439-0982c5271277" containerName="nova-metadata-metadata" containerID="cri-o://3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3" gracePeriod=30 Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.009572 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e25ca83c-6778-4305-9439-0982c5271277","Type":"ContainerStarted","Data":"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487"} Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.011910 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.817071243 podStartE2EDuration="7.011897557s" podCreationTimestamp="2025-12-03 06:02:03 +0000 UTC" firstStartedPulling="2025-12-03 06:02:05.392204347 +0000 UTC m=+1249.327665188" lastFinishedPulling="2025-12-03 06:02:08.587030651 +0000 UTC m=+1252.522491502" observedRunningTime="2025-12-03 06:02:10.00476825 +0000 UTC m=+1253.940229091" watchObservedRunningTime="2025-12-03 06:02:10.011897557 +0000 UTC m=+1253.947358398" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.039263 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.396168798 podStartE2EDuration="7.039238507s" podCreationTimestamp="2025-12-03 06:02:03 +0000 UTC" firstStartedPulling="2025-12-03 06:02:04.941162038 +0000 UTC m=+1248.876622879" lastFinishedPulling="2025-12-03 06:02:08.584231727 +0000 UTC m=+1252.519692588" observedRunningTime="2025-12-03 06:02:10.030764744 +0000 UTC m=+1253.966225605" watchObservedRunningTime="2025-12-03 06:02:10.039238507 +0000 UTC m=+1253.974699338" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.700938 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.744988 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-config-data\") pod \"e25ca83c-6778-4305-9439-0982c5271277\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.745138 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-combined-ca-bundle\") pod \"e25ca83c-6778-4305-9439-0982c5271277\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.745220 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e25ca83c-6778-4305-9439-0982c5271277-logs\") pod \"e25ca83c-6778-4305-9439-0982c5271277\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.745570 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/e25ca83c-6778-4305-9439-0982c5271277-kube-api-access-t9zpz\") pod \"e25ca83c-6778-4305-9439-0982c5271277\" (UID: \"e25ca83c-6778-4305-9439-0982c5271277\") " Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.745813 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e25ca83c-6778-4305-9439-0982c5271277-logs" (OuterVolumeSpecName: "logs") pod "e25ca83c-6778-4305-9439-0982c5271277" (UID: "e25ca83c-6778-4305-9439-0982c5271277"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.746103 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e25ca83c-6778-4305-9439-0982c5271277-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.768063 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e25ca83c-6778-4305-9439-0982c5271277-kube-api-access-t9zpz" (OuterVolumeSpecName: "kube-api-access-t9zpz") pod "e25ca83c-6778-4305-9439-0982c5271277" (UID: "e25ca83c-6778-4305-9439-0982c5271277"). InnerVolumeSpecName "kube-api-access-t9zpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.785836 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e25ca83c-6778-4305-9439-0982c5271277" (UID: "e25ca83c-6778-4305-9439-0982c5271277"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.829190 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-config-data" (OuterVolumeSpecName: "config-data") pod "e25ca83c-6778-4305-9439-0982c5271277" (UID: "e25ca83c-6778-4305-9439-0982c5271277"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.851897 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.851943 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e25ca83c-6778-4305-9439-0982c5271277-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:10 crc kubenswrapper[4810]: I1203 06:02:10.851961 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/e25ca83c-6778-4305-9439-0982c5271277-kube-api-access-t9zpz\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.038234 4810 generic.go:334] "Generic (PLEG): container finished" podID="e25ca83c-6778-4305-9439-0982c5271277" containerID="3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3" exitCode=0 Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.038805 4810 generic.go:334] "Generic (PLEG): container finished" podID="e25ca83c-6778-4305-9439-0982c5271277" containerID="587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487" exitCode=143 Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.038323 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.038314 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e25ca83c-6778-4305-9439-0982c5271277","Type":"ContainerDied","Data":"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3"} Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.039681 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e25ca83c-6778-4305-9439-0982c5271277","Type":"ContainerDied","Data":"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487"} Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.039711 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e25ca83c-6778-4305-9439-0982c5271277","Type":"ContainerDied","Data":"1053b3b4dc7be5917e5f40132e75635850c4fb3d8d067f4ef8e155d566032fdd"} Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.039748 4810 scope.go:117] "RemoveContainer" containerID="3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.047417 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerStarted","Data":"bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103"} Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.081711 4810 scope.go:117] "RemoveContainer" containerID="587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.089011 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.123143 4810 scope.go:117] "RemoveContainer" containerID="3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3" Dec 03 06:02:11 crc kubenswrapper[4810]: E1203 06:02:11.126844 4810 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3\": container with ID starting with 3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3 not found: ID does not exist" containerID="3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.126896 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3"} err="failed to get container status \"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3\": rpc error: code = NotFound desc = could not find container \"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3\": container with ID starting with 3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3 not found: ID does not exist" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.126923 4810 scope.go:117] "RemoveContainer" containerID="587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487" Dec 03 06:02:11 crc kubenswrapper[4810]: E1203 06:02:11.127423 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487\": container with ID starting with 587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487 not found: ID does not exist" containerID="587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.127477 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487"} err="failed to get container status \"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487\": rpc error: code = NotFound desc = could not find container \"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487\": container with ID starting with 587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487 not found: ID does not exist" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.127540 4810 scope.go:117] "RemoveContainer" containerID="3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.128220 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.128841 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3"} err="failed to get container status \"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3\": rpc error: code = NotFound desc = could not find container \"3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3\": container with ID starting with 3d08dfce236ca5777fce8464d6159f7464ffa31e4dfeac60ebf6f9201f4e17d3 not found: ID does not exist" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.128910 4810 scope.go:117] "RemoveContainer" containerID="587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.129492 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487"} err="failed to get container status 
\"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487\": rpc error: code = NotFound desc = could not find container \"587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487\": container with ID starting with 587e8726d075b5fd40ca60b79a8d909bd52f8547e283cb30d6b1b6d4ce648487 not found: ID does not exist" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.208581 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:11 crc kubenswrapper[4810]: E1203 06:02:11.214557 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e25ca83c-6778-4305-9439-0982c5271277" containerName="nova-metadata-log" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.214597 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e25ca83c-6778-4305-9439-0982c5271277" containerName="nova-metadata-log" Dec 03 06:02:11 crc kubenswrapper[4810]: E1203 06:02:11.214632 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e25ca83c-6778-4305-9439-0982c5271277" containerName="nova-metadata-metadata" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.214639 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e25ca83c-6778-4305-9439-0982c5271277" containerName="nova-metadata-metadata" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.215727 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="e25ca83c-6778-4305-9439-0982c5271277" containerName="nova-metadata-log" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.215783 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="e25ca83c-6778-4305-9439-0982c5271277" containerName="nova-metadata-metadata" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.220495 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.224675 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.225159 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.244582 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.283679 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4b61805-8b2b-41fa-a352-ce6ac341afef-logs\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.283756 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.283791 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.283899 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6c5h\" (UniqueName: \"kubernetes.io/projected/f4b61805-8b2b-41fa-a352-ce6ac341afef-kube-api-access-v6c5h\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.283953 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-config-data\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.384825 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-config-data\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.384944 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4b61805-8b2b-41fa-a352-ce6ac341afef-logs\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.385587 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4b61805-8b2b-41fa-a352-ce6ac341afef-logs\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 
06:02:11.385659 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.385800 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.386572 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6c5h\" (UniqueName: \"kubernetes.io/projected/f4b61805-8b2b-41fa-a352-ce6ac341afef-kube-api-access-v6c5h\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.392924 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-config-data\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.411813 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.412296 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.417834 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6c5h\" (UniqueName: \"kubernetes.io/projected/f4b61805-8b2b-41fa-a352-ce6ac341afef-kube-api-access-v6c5h\") pod \"nova-metadata-0\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " pod="openstack/nova-metadata-0" Dec 03 06:02:11 crc kubenswrapper[4810]: I1203 06:02:11.550116 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:12 crc kubenswrapper[4810]: I1203 06:02:12.061713 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerStarted","Data":"ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a"} Dec 03 06:02:12 crc kubenswrapper[4810]: I1203 06:02:12.075686 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:12 crc kubenswrapper[4810]: I1203 06:02:12.399248 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e25ca83c-6778-4305-9439-0982c5271277" path="/var/lib/kubelet/pods/e25ca83c-6778-4305-9439-0982c5271277/volumes" Dec 03 06:02:13 crc kubenswrapper[4810]: I1203 06:02:13.081921 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerStarted","Data":"1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6"} Dec 03 06:02:13 crc kubenswrapper[4810]: I1203 06:02:13.083742 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 06:02:13 crc kubenswrapper[4810]: I1203 06:02:13.088231 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4b61805-8b2b-41fa-a352-ce6ac341afef","Type":"ContainerStarted","Data":"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314"} Dec 03 06:02:13 crc kubenswrapper[4810]: I1203 06:02:13.088303 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4b61805-8b2b-41fa-a352-ce6ac341afef","Type":"ContainerStarted","Data":"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0"} Dec 03 06:02:13 crc kubenswrapper[4810]: I1203 06:02:13.088322 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4b61805-8b2b-41fa-a352-ce6ac341afef","Type":"ContainerStarted","Data":"4073d2158d3cbf84f76f2457e3b3534c6f18d06fd32b73607bde94e8919633a5"} Dec 03 06:02:13 crc kubenswrapper[4810]: I1203 06:02:13.121093 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.887655547 podStartE2EDuration="7.121068187s" podCreationTimestamp="2025-12-03 06:02:06 +0000 UTC" firstStartedPulling="2025-12-03 06:02:09.086684159 +0000 UTC m=+1253.022145000" lastFinishedPulling="2025-12-03 06:02:12.320096769 +0000 UTC m=+1256.255557640" observedRunningTime="2025-12-03 06:02:13.115282665 +0000 UTC m=+1257.050743526" watchObservedRunningTime="2025-12-03 06:02:13.121068187 +0000 UTC m=+1257.056529038" Dec 03 06:02:13 crc kubenswrapper[4810]: I1203 06:02:13.155981 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.155951425 podStartE2EDuration="2.155951425s" podCreationTimestamp="2025-12-03 06:02:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:13.148762716 +0000 UTC m=+1257.084223567" watchObservedRunningTime="2025-12-03 06:02:13.155951425 +0000 UTC m=+1257.091412276" Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.265326 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.266826 4810 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.293508 4810 generic.go:334] "Generic (PLEG): container finished" podID="ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" containerID="efe26805e0c97ed6a5ae468813450dd7baed46a0fda72b25fdcdabe45537f2e7" exitCode=0 Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.293832 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cl2k9" event={"ID":"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed","Type":"ContainerDied","Data":"efe26805e0c97ed6a5ae468813450dd7baed46a0fda72b25fdcdabe45537f2e7"} Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.483613 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.483668 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.534914 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.539456 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.684805 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9f47d9d6c-2s7rt"] Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.685173 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" podUID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" containerName="dnsmasq-dns" containerID="cri-o://e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e" gracePeriod=10 Dec 03 06:02:14 crc kubenswrapper[4810]: I1203 06:02:14.719928 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.249298 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.307610 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-config\") pod \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.307653 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-nb\") pod \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.307839 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-svc\") pod \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.307890 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-sb\") pod \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.307940 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6tmp\" (UniqueName: \"kubernetes.io/projected/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-kube-api-access-h6tmp\") pod \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.307998 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-swift-storage-0\") pod \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\" (UID: \"eb3328f9-d31a-4534-a685-7aa3b0aad3f2\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.344996 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-kube-api-access-h6tmp" (OuterVolumeSpecName: "kube-api-access-h6tmp") pod "eb3328f9-d31a-4534-a685-7aa3b0aad3f2" (UID: "eb3328f9-d31a-4534-a685-7aa3b0aad3f2"). InnerVolumeSpecName "kube-api-access-h6tmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.346051 4810 generic.go:334] "Generic (PLEG): container finished" podID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" containerID="e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e" exitCode=0 Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.346325 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.346984 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.180:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.347102 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.180:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.347313 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" event={"ID":"eb3328f9-d31a-4534-a685-7aa3b0aad3f2","Type":"ContainerDied","Data":"e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e"} Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.347346 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9f47d9d6c-2s7rt" event={"ID":"eb3328f9-d31a-4534-a685-7aa3b0aad3f2","Type":"ContainerDied","Data":"5258da383fbef9388db06cf17f5d5d898bf8dd7a2a25721a551f2d3e50efceb5"} Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.347365 4810 scope.go:117] "RemoveContainer" containerID="e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.394439 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "eb3328f9-d31a-4534-a685-7aa3b0aad3f2" (UID: "eb3328f9-d31a-4534-a685-7aa3b0aad3f2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.400591 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eb3328f9-d31a-4534-a685-7aa3b0aad3f2" (UID: "eb3328f9-d31a-4534-a685-7aa3b0aad3f2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.411594 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6tmp\" (UniqueName: \"kubernetes.io/projected/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-kube-api-access-h6tmp\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.411637 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.411651 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.442078 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "eb3328f9-d31a-4534-a685-7aa3b0aad3f2" (UID: "eb3328f9-d31a-4534-a685-7aa3b0aad3f2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.442643 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.471377 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "eb3328f9-d31a-4534-a685-7aa3b0aad3f2" (UID: "eb3328f9-d31a-4534-a685-7aa3b0aad3f2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.472579 4810 scope.go:117] "RemoveContainer" containerID="af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.513528 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-config" (OuterVolumeSpecName: "config") pod "eb3328f9-d31a-4534-a685-7aa3b0aad3f2" (UID: "eb3328f9-d31a-4534-a685-7aa3b0aad3f2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.513579 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.513960 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.520606 4810 scope.go:117] "RemoveContainer" containerID="e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e" Dec 03 06:02:15 crc kubenswrapper[4810]: E1203 06:02:15.522918 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e\": container with ID starting with e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e not found: ID does not exist" containerID="e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.523057 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e"} err="failed to get container status \"e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e\": rpc error: code = NotFound desc = could not find container \"e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e\": container with ID starting with e18b545a510c18eb3a3be976333fb062664024ba2f84e0a53e11d9b3c024646e not found: ID does not exist" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.523159 4810 scope.go:117] "RemoveContainer" containerID="af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5" Dec 03 06:02:15 crc kubenswrapper[4810]: E1203 06:02:15.524003 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5\": container with ID starting with af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5 not found: ID does not exist" containerID="af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.524055 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5"} err="failed to get container status \"af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5\": rpc error: code = NotFound desc = could not find container \"af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5\": container with ID starting with af5f8462aff0e90403bebbb1b78d6e8f22ba420d3552e4620f1b8f3e5f978bc5 not found: ID does not exist" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.616666 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3328f9-d31a-4534-a685-7aa3b0aad3f2-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.700201 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9f47d9d6c-2s7rt"] Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.716301 
4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9f47d9d6c-2s7rt"] Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.723425 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.920009 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-scripts\") pod \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.920123 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbbmd\" (UniqueName: \"kubernetes.io/projected/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-kube-api-access-hbbmd\") pod \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.920219 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-combined-ca-bundle\") pod \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.920244 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-config-data\") pod \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\" (UID: \"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed\") " Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.926699 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-scripts" (OuterVolumeSpecName: "scripts") pod "ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" (UID: "ae78cc18-4ba1-4b54-b0cd-a9569d2281ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.927279 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-kube-api-access-hbbmd" (OuterVolumeSpecName: "kube-api-access-hbbmd") pod "ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" (UID: "ae78cc18-4ba1-4b54-b0cd-a9569d2281ed"). InnerVolumeSpecName "kube-api-access-hbbmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.955727 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-config-data" (OuterVolumeSpecName: "config-data") pod "ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" (UID: "ae78cc18-4ba1-4b54-b0cd-a9569d2281ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:15 crc kubenswrapper[4810]: I1203 06:02:15.958107 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" (UID: "ae78cc18-4ba1-4b54-b0cd-a9569d2281ed"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.023012 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.023051 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbbmd\" (UniqueName: \"kubernetes.io/projected/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-kube-api-access-hbbmd\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.023065 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.023076 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.360169 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cl2k9" event={"ID":"ae78cc18-4ba1-4b54-b0cd-a9569d2281ed","Type":"ContainerDied","Data":"52af320b4332f0384fec22ef5a05a52d5763d3dd2620307ec4c598db5442f146"} Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.360261 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52af320b4332f0384fec22ef5a05a52d5763d3dd2620307ec4c598db5442f146" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.360196 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cl2k9" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.430952 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" path="/var/lib/kubelet/pods/eb3328f9-d31a-4534-a685-7aa3b0aad3f2/volumes" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.552861 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.555161 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.645096 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.677279 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.677567 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-log" containerID="cri-o://45139548c56abbff54c9467ff91e3ce8d73e1b9df81fc4e2c151559ff1fde590" gracePeriod=30 Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.677672 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-api" containerID="cri-o://810d402b7ecf1eb68c1e5927b914711e98e7f7c02784d742d35745f028b9b062" gracePeriod=30 Dec 03 06:02:16 crc kubenswrapper[4810]: I1203 06:02:16.717445 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 
03 06:02:17 crc kubenswrapper[4810]: I1203 06:02:17.375562 4810 generic.go:334] "Generic (PLEG): container finished" podID="14aef582-d59c-4621-9c63-580006ddb5a5" containerID="45139548c56abbff54c9467ff91e3ce8d73e1b9df81fc4e2c151559ff1fde590" exitCode=143 Dec 03 06:02:17 crc kubenswrapper[4810]: I1203 06:02:17.375796 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14aef582-d59c-4621-9c63-580006ddb5a5","Type":"ContainerDied","Data":"45139548c56abbff54c9467ff91e3ce8d73e1b9df81fc4e2c151559ff1fde590"} Dec 03 06:02:17 crc kubenswrapper[4810]: I1203 06:02:17.376328 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="550d872e-2f7f-4f79-8a54-69360fcad3ab" containerName="nova-scheduler-scheduler" containerID="cri-o://333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c" gracePeriod=30 Dec 03 06:02:18 crc kubenswrapper[4810]: I1203 06:02:18.389682 4810 generic.go:334] "Generic (PLEG): container finished" podID="dd75096a-f05e-412e-a147-cba8eb1474b9" containerID="51ec9842055502fc42451726abce9f64b172d32ae02e7a3e8f4ac3ce41b512e6" exitCode=0 Dec 03 06:02:18 crc kubenswrapper[4810]: I1203 06:02:18.390053 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerName="nova-metadata-log" containerID="cri-o://6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0" gracePeriod=30 Dec 03 06:02:18 crc kubenswrapper[4810]: I1203 06:02:18.390776 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerName="nova-metadata-metadata" containerID="cri-o://18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314" gracePeriod=30 Dec 03 06:02:18 crc kubenswrapper[4810]: I1203 06:02:18.404946 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" event={"ID":"dd75096a-f05e-412e-a147-cba8eb1474b9","Type":"ContainerDied","Data":"51ec9842055502fc42451726abce9f64b172d32ae02e7a3e8f4ac3ce41b512e6"} Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.030385 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.196501 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6c5h\" (UniqueName: \"kubernetes.io/projected/f4b61805-8b2b-41fa-a352-ce6ac341afef-kube-api-access-v6c5h\") pod \"f4b61805-8b2b-41fa-a352-ce6ac341afef\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.197627 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-combined-ca-bundle\") pod \"f4b61805-8b2b-41fa-a352-ce6ac341afef\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.197824 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-config-data\") pod \"f4b61805-8b2b-41fa-a352-ce6ac341afef\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.197882 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4b61805-8b2b-41fa-a352-ce6ac341afef-logs\") pod \"f4b61805-8b2b-41fa-a352-ce6ac341afef\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.197935 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-nova-metadata-tls-certs\") pod \"f4b61805-8b2b-41fa-a352-ce6ac341afef\" (UID: \"f4b61805-8b2b-41fa-a352-ce6ac341afef\") " Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.198339 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b61805-8b2b-41fa-a352-ce6ac341afef-logs" (OuterVolumeSpecName: "logs") pod "f4b61805-8b2b-41fa-a352-ce6ac341afef" (UID: "f4b61805-8b2b-41fa-a352-ce6ac341afef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.199368 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4b61805-8b2b-41fa-a352-ce6ac341afef-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.216593 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4b61805-8b2b-41fa-a352-ce6ac341afef-kube-api-access-v6c5h" (OuterVolumeSpecName: "kube-api-access-v6c5h") pod "f4b61805-8b2b-41fa-a352-ce6ac341afef" (UID: "f4b61805-8b2b-41fa-a352-ce6ac341afef"). InnerVolumeSpecName "kube-api-access-v6c5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.236812 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-config-data" (OuterVolumeSpecName: "config-data") pod "f4b61805-8b2b-41fa-a352-ce6ac341afef" (UID: "f4b61805-8b2b-41fa-a352-ce6ac341afef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.240553 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4b61805-8b2b-41fa-a352-ce6ac341afef" (UID: "f4b61805-8b2b-41fa-a352-ce6ac341afef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.292925 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f4b61805-8b2b-41fa-a352-ce6ac341afef" (UID: "f4b61805-8b2b-41fa-a352-ce6ac341afef"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.302486 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.302525 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.302540 4810 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4b61805-8b2b-41fa-a352-ce6ac341afef-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.302557 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6c5h\" (UniqueName: \"kubernetes.io/projected/f4b61805-8b2b-41fa-a352-ce6ac341afef-kube-api-access-v6c5h\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.404424 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerID="18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314" exitCode=0 Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.404908 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerID="6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0" exitCode=143 Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.405122 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.405295 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4b61805-8b2b-41fa-a352-ce6ac341afef","Type":"ContainerDied","Data":"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314"} Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.405376 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4b61805-8b2b-41fa-a352-ce6ac341afef","Type":"ContainerDied","Data":"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0"} Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.405391 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4b61805-8b2b-41fa-a352-ce6ac341afef","Type":"ContainerDied","Data":"4073d2158d3cbf84f76f2457e3b3534c6f18d06fd32b73607bde94e8919633a5"} Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.405414 4810 scope.go:117] "RemoveContainer" containerID="18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.438337 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.447093 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.452211 4810 scope.go:117] "RemoveContainer" containerID="6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466030 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.466573 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" containerName="dnsmasq-dns" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466594 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" containerName="dnsmasq-dns" Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.466609 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" containerName="init" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466617 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" containerName="init" Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.466632 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" containerName="nova-manage" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466638 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" containerName="nova-manage" Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.466656 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerName="nova-metadata-log" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466662 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerName="nova-metadata-log" Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.466669 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerName="nova-metadata-metadata" Dec 03 06:02:19 crc 
kubenswrapper[4810]: I1203 06:02:19.466675 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerName="nova-metadata-metadata" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466891 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerName="nova-metadata-log" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466907 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" containerName="nova-metadata-metadata" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466918 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb3328f9-d31a-4534-a685-7aa3b0aad3f2" containerName="dnsmasq-dns" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.466927 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" containerName="nova-manage" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.467979 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.472884 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.473064 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.498426 4810 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.506515 4810 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.509646 4810 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.509692 4810 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="550d872e-2f7f-4f79-8a54-69360fcad3ab" containerName="nova-scheduler-scheduler" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.511492 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.561401 4810 scope.go:117] "RemoveContainer" containerID="18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314" Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.564170 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314\": container with ID starting with 18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314 not found: ID does not exist" containerID="18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.564255 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314"} err="failed to get container status \"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314\": rpc error: code = NotFound desc = could not find container \"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314\": container with ID starting with 18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314 not found: ID does not exist" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.564305 4810 scope.go:117] "RemoveContainer" containerID="6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0" Dec 03 06:02:19 crc kubenswrapper[4810]: E1203 06:02:19.564848 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0\": container with ID starting with 6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0 not found: ID does not exist" containerID="6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.564913 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0"} err="failed to get container status \"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0\": rpc error: code = NotFound desc = could not find container \"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0\": container with ID starting with 6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0 not found: ID does not exist" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.564956 4810 scope.go:117] "RemoveContainer" containerID="18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.565402 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314"} err="failed to get container status \"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314\": rpc error: code = NotFound desc = could not find container \"18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314\": container with ID starting with 18f5f1235ae62a25a402619f173ae70b0a3451116e686ecc3470ada3ace2e314 not found: ID does not exist" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.565443 4810 scope.go:117] "RemoveContainer" containerID="6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.565889 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0"} err="failed to get container status \"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0\": rpc error: code = NotFound desc = could not find container \"6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0\": 
container with ID starting with 6f53e5c57d194342277ae3c39f2c66a6fa78fd9e7dc01ffe1114f860e3efa4b0 not found: ID does not exist" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.610640 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-config-data\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.610717 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.610817 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0442d702-9c21-4eee-9f8c-7b1224adb4a7-logs\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.611207 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl7dh\" (UniqueName: \"kubernetes.io/projected/0442d702-9c21-4eee-9f8c-7b1224adb4a7-kube-api-access-rl7dh\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.611297 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.713390 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.713519 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0442d702-9c21-4eee-9f8c-7b1224adb4a7-logs\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.713604 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl7dh\" (UniqueName: \"kubernetes.io/projected/0442d702-9c21-4eee-9f8c-7b1224adb4a7-kube-api-access-rl7dh\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.713635 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 
06:02:19.713714 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-config-data\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.714375 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0442d702-9c21-4eee-9f8c-7b1224adb4a7-logs\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.719954 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.728752 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.733078 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-config-data\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.735215 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl7dh\" (UniqueName: \"kubernetes.io/projected/0442d702-9c21-4eee-9f8c-7b1224adb4a7-kube-api-access-rl7dh\") pod \"nova-metadata-0\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " pod="openstack/nova-metadata-0" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.825677 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:19 crc kubenswrapper[4810]: I1203 06:02:19.872104 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.019602 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-config-data\") pod \"dd75096a-f05e-412e-a147-cba8eb1474b9\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.020100 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl8sz\" (UniqueName: \"kubernetes.io/projected/dd75096a-f05e-412e-a147-cba8eb1474b9-kube-api-access-zl8sz\") pod \"dd75096a-f05e-412e-a147-cba8eb1474b9\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.020136 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-combined-ca-bundle\") pod \"dd75096a-f05e-412e-a147-cba8eb1474b9\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.020234 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-scripts\") pod \"dd75096a-f05e-412e-a147-cba8eb1474b9\" (UID: \"dd75096a-f05e-412e-a147-cba8eb1474b9\") " Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.026139 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd75096a-f05e-412e-a147-cba8eb1474b9-kube-api-access-zl8sz" (OuterVolumeSpecName: "kube-api-access-zl8sz") pod "dd75096a-f05e-412e-a147-cba8eb1474b9" (UID: "dd75096a-f05e-412e-a147-cba8eb1474b9"). InnerVolumeSpecName "kube-api-access-zl8sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.027143 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-scripts" (OuterVolumeSpecName: "scripts") pod "dd75096a-f05e-412e-a147-cba8eb1474b9" (UID: "dd75096a-f05e-412e-a147-cba8eb1474b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.048389 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-config-data" (OuterVolumeSpecName: "config-data") pod "dd75096a-f05e-412e-a147-cba8eb1474b9" (UID: "dd75096a-f05e-412e-a147-cba8eb1474b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.052861 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd75096a-f05e-412e-a147-cba8eb1474b9" (UID: "dd75096a-f05e-412e-a147-cba8eb1474b9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.123351 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.123391 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl8sz\" (UniqueName: \"kubernetes.io/projected/dd75096a-f05e-412e-a147-cba8eb1474b9-kube-api-access-zl8sz\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.123401 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.123410 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd75096a-f05e-412e-a147-cba8eb1474b9-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.359705 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.388154 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b61805-8b2b-41fa-a352-ce6ac341afef" path="/var/lib/kubelet/pods/f4b61805-8b2b-41fa-a352-ce6ac341afef/volumes" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.446406 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" event={"ID":"dd75096a-f05e-412e-a147-cba8eb1474b9","Type":"ContainerDied","Data":"a56974ed678da2c507d4eeabd0bb845eb41d372be64574fb4492b2c46ef27543"} Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.446490 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a56974ed678da2c507d4eeabd0bb845eb41d372be64574fb4492b2c46ef27543" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.446651 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7ts2b" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.448118 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0442d702-9c21-4eee-9f8c-7b1224adb4a7","Type":"ContainerStarted","Data":"3e51569c6a27fc567c51accdcddba6eed5fbb10699e6665a2547b1d3f3b9cfe3"} Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.515772 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 06:02:20 crc kubenswrapper[4810]: E1203 06:02:20.516290 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd75096a-f05e-412e-a147-cba8eb1474b9" containerName="nova-cell1-conductor-db-sync" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.516310 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd75096a-f05e-412e-a147-cba8eb1474b9" containerName="nova-cell1-conductor-db-sync" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.516509 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd75096a-f05e-412e-a147-cba8eb1474b9" containerName="nova-cell1-conductor-db-sync" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.517268 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.524483 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.539939 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70eeea83-764d-4b0f-be6a-74c31a35c455-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.540047 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70eeea83-764d-4b0f-be6a-74c31a35c455-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.540079 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28g2x\" (UniqueName: \"kubernetes.io/projected/70eeea83-764d-4b0f-be6a-74c31a35c455-kube-api-access-28g2x\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.545432 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.642275 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70eeea83-764d-4b0f-be6a-74c31a35c455-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.642319 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28g2x\" (UniqueName: \"kubernetes.io/projected/70eeea83-764d-4b0f-be6a-74c31a35c455-kube-api-access-28g2x\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.642427 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70eeea83-764d-4b0f-be6a-74c31a35c455-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.649520 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70eeea83-764d-4b0f-be6a-74c31a35c455-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.651546 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70eeea83-764d-4b0f-be6a-74c31a35c455-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.661114 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28g2x\" (UniqueName: \"kubernetes.io/projected/70eeea83-764d-4b0f-be6a-74c31a35c455-kube-api-access-28g2x\") pod \"nova-cell1-conductor-0\" (UID: \"70eeea83-764d-4b0f-be6a-74c31a35c455\") " pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:20 crc kubenswrapper[4810]: I1203 06:02:20.886552 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.380541 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.480345 4810 generic.go:334] "Generic (PLEG): container finished" podID="550d872e-2f7f-4f79-8a54-69360fcad3ab" containerID="333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c" exitCode=0 Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.480408 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"550d872e-2f7f-4f79-8a54-69360fcad3ab","Type":"ContainerDied","Data":"333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c"} Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.480440 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"550d872e-2f7f-4f79-8a54-69360fcad3ab","Type":"ContainerDied","Data":"50bbce0bc33a89af2b2ee46531b9e748d6d92e9c25506f7bd247b9c1abe34aaa"} Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.480460 4810 scope.go:117] "RemoveContainer" containerID="333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.480598 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.496855 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0442d702-9c21-4eee-9f8c-7b1224adb4a7","Type":"ContainerStarted","Data":"bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32"} Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.496899 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0442d702-9c21-4eee-9f8c-7b1224adb4a7","Type":"ContainerStarted","Data":"1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a"} Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.515003 4810 scope.go:117] "RemoveContainer" containerID="333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.515893 4810 generic.go:334] "Generic (PLEG): container finished" podID="14aef582-d59c-4621-9c63-580006ddb5a5" containerID="810d402b7ecf1eb68c1e5927b914711e98e7f7c02784d742d35745f028b9b062" exitCode=0 Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.515947 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14aef582-d59c-4621-9c63-580006ddb5a5","Type":"ContainerDied","Data":"810d402b7ecf1eb68c1e5927b914711e98e7f7c02784d742d35745f028b9b062"} Dec 03 06:02:21 crc kubenswrapper[4810]: E1203 06:02:21.518435 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c\": container with ID starting with 333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c not found: ID does not exist" containerID="333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.518484 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c"} err="failed to get container status \"333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c\": rpc error: code = NotFound desc = could not find container \"333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c\": container with ID starting with 333577b9ea5dd4ac29e25ec65fdf709e72702de320ebf5b3cbbf2bade219df5c not found: ID does not exist" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.538232 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.53821107 podStartE2EDuration="2.53821107s" podCreationTimestamp="2025-12-03 06:02:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:21.53520474 +0000 UTC m=+1265.470665581" watchObservedRunningTime="2025-12-03 06:02:21.53821107 +0000 UTC m=+1265.473671901" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.558685 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.566914 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-config-data\") pod \"550d872e-2f7f-4f79-8a54-69360fcad3ab\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 
06:02:21.566975 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxnkc\" (UniqueName: \"kubernetes.io/projected/550d872e-2f7f-4f79-8a54-69360fcad3ab-kube-api-access-zxnkc\") pod \"550d872e-2f7f-4f79-8a54-69360fcad3ab\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.567097 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-combined-ca-bundle\") pod \"550d872e-2f7f-4f79-8a54-69360fcad3ab\" (UID: \"550d872e-2f7f-4f79-8a54-69360fcad3ab\") " Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.583472 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/550d872e-2f7f-4f79-8a54-69360fcad3ab-kube-api-access-zxnkc" (OuterVolumeSpecName: "kube-api-access-zxnkc") pod "550d872e-2f7f-4f79-8a54-69360fcad3ab" (UID: "550d872e-2f7f-4f79-8a54-69360fcad3ab"). InnerVolumeSpecName "kube-api-access-zxnkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.608437 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-config-data" (OuterVolumeSpecName: "config-data") pod "550d872e-2f7f-4f79-8a54-69360fcad3ab" (UID: "550d872e-2f7f-4f79-8a54-69360fcad3ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.618554 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "550d872e-2f7f-4f79-8a54-69360fcad3ab" (UID: "550d872e-2f7f-4f79-8a54-69360fcad3ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.670526 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.670559 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxnkc\" (UniqueName: \"kubernetes.io/projected/550d872e-2f7f-4f79-8a54-69360fcad3ab-kube-api-access-zxnkc\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.670574 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550d872e-2f7f-4f79-8a54-69360fcad3ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.742501 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.772171 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-combined-ca-bundle\") pod \"14aef582-d59c-4621-9c63-580006ddb5a5\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.772508 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14aef582-d59c-4621-9c63-580006ddb5a5-logs\") pod \"14aef582-d59c-4621-9c63-580006ddb5a5\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.772562 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7l29\" (UniqueName: \"kubernetes.io/projected/14aef582-d59c-4621-9c63-580006ddb5a5-kube-api-access-c7l29\") pod \"14aef582-d59c-4621-9c63-580006ddb5a5\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.772595 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-config-data\") pod \"14aef582-d59c-4621-9c63-580006ddb5a5\" (UID: \"14aef582-d59c-4621-9c63-580006ddb5a5\") " Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.778194 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14aef582-d59c-4621-9c63-580006ddb5a5-logs" (OuterVolumeSpecName: "logs") pod "14aef582-d59c-4621-9c63-580006ddb5a5" (UID: "14aef582-d59c-4621-9c63-580006ddb5a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.781972 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14aef582-d59c-4621-9c63-580006ddb5a5-kube-api-access-c7l29" (OuterVolumeSpecName: "kube-api-access-c7l29") pod "14aef582-d59c-4621-9c63-580006ddb5a5" (UID: "14aef582-d59c-4621-9c63-580006ddb5a5"). InnerVolumeSpecName "kube-api-access-c7l29". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.830341 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-config-data" (OuterVolumeSpecName: "config-data") pod "14aef582-d59c-4621-9c63-580006ddb5a5" (UID: "14aef582-d59c-4621-9c63-580006ddb5a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.847340 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14aef582-d59c-4621-9c63-580006ddb5a5" (UID: "14aef582-d59c-4621-9c63-580006ddb5a5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.862009 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.875518 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14aef582-d59c-4621-9c63-580006ddb5a5-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.875559 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7l29\" (UniqueName: \"kubernetes.io/projected/14aef582-d59c-4621-9c63-580006ddb5a5-kube-api-access-c7l29\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.875574 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.875584 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14aef582-d59c-4621-9c63-580006ddb5a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.884903 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.895275 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:21 crc kubenswrapper[4810]: E1203 06:02:21.895896 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-api" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.895918 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-api" Dec 03 06:02:21 crc kubenswrapper[4810]: E1203 06:02:21.895949 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="550d872e-2f7f-4f79-8a54-69360fcad3ab" containerName="nova-scheduler-scheduler" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.895956 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="550d872e-2f7f-4f79-8a54-69360fcad3ab" containerName="nova-scheduler-scheduler" Dec 03 06:02:21 crc kubenswrapper[4810]: E1203 06:02:21.895987 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-log" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.895993 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-log" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.896183 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-log" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.896197 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" containerName="nova-api-api" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.896222 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="550d872e-2f7f-4f79-8a54-69360fcad3ab" containerName="nova-scheduler-scheduler" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.897048 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.900761 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.904456 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.977528 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.977582 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vchrg\" (UniqueName: \"kubernetes.io/projected/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-kube-api-access-vchrg\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:21 crc kubenswrapper[4810]: I1203 06:02:21.977614 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-config-data\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.079513 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.080616 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vchrg\" (UniqueName: \"kubernetes.io/projected/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-kube-api-access-vchrg\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.080769 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-config-data\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.085666 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-config-data\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.086062 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.104331 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vchrg\" (UniqueName: 
\"kubernetes.io/projected/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-kube-api-access-vchrg\") pod \"nova-scheduler-0\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " pod="openstack/nova-scheduler-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.213621 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.395069 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="550d872e-2f7f-4f79-8a54-69360fcad3ab" path="/var/lib/kubelet/pods/550d872e-2f7f-4f79-8a54-69360fcad3ab/volumes" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.532304 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.532302 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14aef582-d59c-4621-9c63-580006ddb5a5","Type":"ContainerDied","Data":"373381f07d955bc86144915095988de80fe0ee0be7b1daf783ac299c9e7512be"} Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.532491 4810 scope.go:117] "RemoveContainer" containerID="810d402b7ecf1eb68c1e5927b914711e98e7f7c02784d742d35745f028b9b062" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.539804 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"70eeea83-764d-4b0f-be6a-74c31a35c455","Type":"ContainerStarted","Data":"68d8c2722d769ae61974113f98068b36352a7a2c85a9066340ecfa9ea10dd2d4"} Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.539879 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"70eeea83-764d-4b0f-be6a-74c31a35c455","Type":"ContainerStarted","Data":"9a7b24582dd9fa3bd7aae5742ac1dfa152935c9d29b17d07945c1b98beb94904"} Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.563642 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.563613593 podStartE2EDuration="2.563613593s" podCreationTimestamp="2025-12-03 06:02:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:22.559191267 +0000 UTC m=+1266.494652108" watchObservedRunningTime="2025-12-03 06:02:22.563613593 +0000 UTC m=+1266.499074444" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.624971 4810 scope.go:117] "RemoveContainer" containerID="45139548c56abbff54c9467ff91e3ce8d73e1b9df81fc4e2c151559ff1fde590" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.627668 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.650631 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.660640 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.662617 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.667040 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.672646 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.706813 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-logs\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.706960 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98v44\" (UniqueName: \"kubernetes.io/projected/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-kube-api-access-98v44\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.707138 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.707575 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-config-data\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.763719 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.811155 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-config-data\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.811257 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-logs\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.811295 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98v44\" (UniqueName: \"kubernetes.io/projected/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-kube-api-access-98v44\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.811340 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.812206 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-logs\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.815640 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-config-data\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.819929 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.842584 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98v44\" (UniqueName: \"kubernetes.io/projected/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-kube-api-access-98v44\") pod \"nova-api-0\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " pod="openstack/nova-api-0" Dec 03 06:02:22 crc kubenswrapper[4810]: I1203 06:02:22.982939 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:23 crc kubenswrapper[4810]: I1203 06:02:23.547762 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:23 crc kubenswrapper[4810]: I1203 06:02:23.557357 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5","Type":"ContainerStarted","Data":"068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d"} Dec 03 06:02:23 crc kubenswrapper[4810]: I1203 06:02:23.557414 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5","Type":"ContainerStarted","Data":"ab09f947074f03a3e489854c8e20215b5d024448735d0c803a4735a87edff75f"} Dec 03 06:02:23 crc kubenswrapper[4810]: I1203 06:02:23.564296 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:23 crc kubenswrapper[4810]: I1203 06:02:23.588768 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.58872637 podStartE2EDuration="2.58872637s" podCreationTimestamp="2025-12-03 06:02:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:23.57505435 +0000 UTC m=+1267.510515201" watchObservedRunningTime="2025-12-03 06:02:23.58872637 +0000 UTC m=+1267.524187221" Dec 03 06:02:24 crc kubenswrapper[4810]: I1203 06:02:24.394495 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14aef582-d59c-4621-9c63-580006ddb5a5" path="/var/lib/kubelet/pods/14aef582-d59c-4621-9c63-580006ddb5a5/volumes" Dec 03 06:02:24 crc kubenswrapper[4810]: I1203 06:02:24.581713 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3","Type":"ContainerStarted","Data":"f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b"} Dec 03 06:02:24 crc kubenswrapper[4810]: I1203 06:02:24.581808 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3","Type":"ContainerStarted","Data":"db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473"} Dec 03 06:02:24 crc kubenswrapper[4810]: I1203 06:02:24.581833 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3","Type":"ContainerStarted","Data":"7587b796c6c003370f2c54523fc07c95b5d7879b43571bb723d414a17f8c1b4c"} Dec 03 06:02:24 crc kubenswrapper[4810]: I1203 06:02:24.621568 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.621540739 podStartE2EDuration="2.621540739s" podCreationTimestamp="2025-12-03 06:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:24.609995605 +0000 UTC m=+1268.545456456" watchObservedRunningTime="2025-12-03 06:02:24.621540739 +0000 UTC m=+1268.557001620" Dec 03 06:02:24 crc kubenswrapper[4810]: I1203 06:02:24.872869 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 06:02:24 crc kubenswrapper[4810]: I1203 06:02:24.874301 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 06:02:27 crc kubenswrapper[4810]: I1203 06:02:27.214155 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 06:02:29 crc kubenswrapper[4810]: I1203 06:02:29.872553 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 06:02:29 crc kubenswrapper[4810]: I1203 06:02:29.873132 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 06:02:30 crc kubenswrapper[4810]: I1203 06:02:30.892974 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.188:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 06:02:30 crc kubenswrapper[4810]: I1203 06:02:30.893002 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.188:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 06:02:30 crc kubenswrapper[4810]: I1203 06:02:30.940818 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 03 06:02:32 crc kubenswrapper[4810]: I1203 06:02:32.213855 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 06:02:32 crc kubenswrapper[4810]: I1203 06:02:32.268776 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 06:02:32 crc kubenswrapper[4810]: I1203 06:02:32.729573 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 06:02:32 crc kubenswrapper[4810]: I1203 06:02:32.984123 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 06:02:32 crc kubenswrapper[4810]: I1203 06:02:32.984196 4810 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 06:02:34 crc kubenswrapper[4810]: I1203 06:02:34.067050 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 06:02:34 crc kubenswrapper[4810]: I1203 06:02:34.068046 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 06:02:37 crc kubenswrapper[4810]: I1203 06:02:37.259305 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 06:02:39 crc kubenswrapper[4810]: I1203 06:02:39.880184 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 06:02:39 crc kubenswrapper[4810]: I1203 06:02:39.891157 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 06:02:39 crc kubenswrapper[4810]: I1203 06:02:39.893648 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 06:02:39 crc kubenswrapper[4810]: E1203 06:02:39.985630 4810 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550d872e_2f7f_4f79_8a54_69360fcad3ab.slice/crio-50bbce0bc33a89af2b2ee46531b9e748d6d92e9c25506f7bd247b9c1abe34aaa: Error finding container 50bbce0bc33a89af2b2ee46531b9e748d6d92e9c25506f7bd247b9c1abe34aaa: Status 404 returned error can't find the container with id 50bbce0bc33a89af2b2ee46531b9e748d6d92e9c25506f7bd247b9c1abe34aaa Dec 03 06:02:40 crc kubenswrapper[4810]: E1203 06:02:40.346247 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bde4997_caf1_41d4_8f41_7555a94311b2.slice/crio-920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bde4997_caf1_41d4_8f41_7555a94311b2.slice/crio-conmon-920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067.scope\": RecentStats: unable to find data in memory cache]" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.509269 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.689311 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgvzc\" (UniqueName: \"kubernetes.io/projected/1bde4997-caf1-41d4-8f41-7555a94311b2-kube-api-access-wgvzc\") pod \"1bde4997-caf1-41d4-8f41-7555a94311b2\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.689667 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-config-data\") pod \"1bde4997-caf1-41d4-8f41-7555a94311b2\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.689722 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-combined-ca-bundle\") pod \"1bde4997-caf1-41d4-8f41-7555a94311b2\" (UID: \"1bde4997-caf1-41d4-8f41-7555a94311b2\") " Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.700106 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bde4997-caf1-41d4-8f41-7555a94311b2-kube-api-access-wgvzc" (OuterVolumeSpecName: "kube-api-access-wgvzc") pod "1bde4997-caf1-41d4-8f41-7555a94311b2" (UID: "1bde4997-caf1-41d4-8f41-7555a94311b2"). InnerVolumeSpecName "kube-api-access-wgvzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.725938 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1bde4997-caf1-41d4-8f41-7555a94311b2" (UID: "1bde4997-caf1-41d4-8f41-7555a94311b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.734588 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-config-data" (OuterVolumeSpecName: "config-data") pod "1bde4997-caf1-41d4-8f41-7555a94311b2" (UID: "1bde4997-caf1-41d4-8f41-7555a94311b2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.789558 4810 generic.go:334] "Generic (PLEG): container finished" podID="1bde4997-caf1-41d4-8f41-7555a94311b2" containerID="920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067" exitCode=137 Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.789659 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1bde4997-caf1-41d4-8f41-7555a94311b2","Type":"ContainerDied","Data":"920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067"} Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.790079 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1bde4997-caf1-41d4-8f41-7555a94311b2","Type":"ContainerDied","Data":"3fc5c79d2ae82d800b4d05fa12b42a34ec4d34e4580c4fae4883cd46b29d77b1"} Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.790121 4810 scope.go:117] "RemoveContainer" containerID="920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.789684 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.792069 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgvzc\" (UniqueName: \"kubernetes.io/projected/1bde4997-caf1-41d4-8f41-7555a94311b2-kube-api-access-wgvzc\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.792091 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.792104 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bde4997-caf1-41d4-8f41-7555a94311b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.804662 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.845032 4810 scope.go:117] "RemoveContainer" containerID="920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067" Dec 03 06:02:40 crc kubenswrapper[4810]: E1203 06:02:40.845528 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067\": container with ID starting with 920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067 not found: ID does not exist" containerID="920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.845580 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067"} err="failed to get container status \"920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067\": rpc error: code = NotFound desc = could not find container \"920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067\": container with ID starting with 920c776845de19ef69e6effa32057560a94bc91e7159cdb5f68c142b5028c067 not found: ID does not exist" Dec 03 06:02:40 crc kubenswrapper[4810]: 
I1203 06:02:40.879299 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.918204 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.938815 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:40 crc kubenswrapper[4810]: E1203 06:02:40.939422 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bde4997-caf1-41d4-8f41-7555a94311b2" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.939443 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bde4997-caf1-41d4-8f41-7555a94311b2" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.939663 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bde4997-caf1-41d4-8f41-7555a94311b2" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.940540 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.947152 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.947318 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.947394 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 03 06:02:40 crc kubenswrapper[4810]: I1203 06:02:40.962251 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.099990 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.100051 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.100247 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.100479 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57zmc\" (UniqueName: \"kubernetes.io/projected/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-kube-api-access-57zmc\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.100532 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.202696 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.202876 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57zmc\" (UniqueName: \"kubernetes.io/projected/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-kube-api-access-57zmc\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.202930 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.202997 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.203019 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.209084 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.210658 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.211058 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.212537 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.235331 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57zmc\" (UniqueName: \"kubernetes.io/projected/35ba7fd0-41b1-4669-8cb7-5538b4ef5492-kube-api-access-57zmc\") pod \"nova-cell1-novncproxy-0\" (UID: \"35ba7fd0-41b1-4669-8cb7-5538b4ef5492\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.275342 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.767716 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 06:02:41 crc kubenswrapper[4810]: W1203 06:02:41.769272 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35ba7fd0_41b1_4669_8cb7_5538b4ef5492.slice/crio-7729fcd7e86fa071858dce53a9571ed5212b9045a0bc23c6bb09e3a5f95e13e5 WatchSource:0}: Error finding container 7729fcd7e86fa071858dce53a9571ed5212b9045a0bc23c6bb09e3a5f95e13e5: Status 404 returned error can't find the container with id 7729fcd7e86fa071858dce53a9571ed5212b9045a0bc23c6bb09e3a5f95e13e5 Dec 03 06:02:41 crc kubenswrapper[4810]: I1203 06:02:41.802612 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"35ba7fd0-41b1-4669-8cb7-5538b4ef5492","Type":"ContainerStarted","Data":"7729fcd7e86fa071858dce53a9571ed5212b9045a0bc23c6bb09e3a5f95e13e5"} Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.373053 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.374653 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="790a0398-c3e3-4070-9ce3-7ecbf8b2bcad" containerName="kube-state-metrics" containerID="cri-o://26135b25a697df12edb2fac87680236617396581660551e65868d5ae03578c46" gracePeriod=30 Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.407462 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bde4997-caf1-41d4-8f41-7555a94311b2" path="/var/lib/kubelet/pods/1bde4997-caf1-41d4-8f41-7555a94311b2/volumes" Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.823247 4810 generic.go:334] "Generic (PLEG): container finished" podID="790a0398-c3e3-4070-9ce3-7ecbf8b2bcad" containerID="26135b25a697df12edb2fac87680236617396581660551e65868d5ae03578c46" exitCode=2 Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.823368 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad","Type":"ContainerDied","Data":"26135b25a697df12edb2fac87680236617396581660551e65868d5ae03578c46"} Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.823534 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad","Type":"ContainerDied","Data":"99e842efd559639cd582dc6f1f737a95cdf0556be38427e1dcdd82f2f583592b"} Dec 03 06:02:42 crc 
kubenswrapper[4810]: I1203 06:02:42.823705 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99e842efd559639cd582dc6f1f737a95cdf0556be38427e1dcdd82f2f583592b" Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.828320 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"35ba7fd0-41b1-4669-8cb7-5538b4ef5492","Type":"ContainerStarted","Data":"91d2dc9043c3a85a41b8881659e94a63d0bba3d43c2402914ab203e19c92da89"} Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.873835 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.873803307 podStartE2EDuration="2.873803307s" podCreationTimestamp="2025-12-03 06:02:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:42.859923562 +0000 UTC m=+1286.795384413" watchObservedRunningTime="2025-12-03 06:02:42.873803307 +0000 UTC m=+1286.809264148" Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.906655 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.990866 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.991390 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 06:02:42 crc kubenswrapper[4810]: I1203 06:02:42.991585 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.002516 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.074593 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bbtw\" (UniqueName: \"kubernetes.io/projected/790a0398-c3e3-4070-9ce3-7ecbf8b2bcad-kube-api-access-7bbtw\") pod \"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad\" (UID: \"790a0398-c3e3-4070-9ce3-7ecbf8b2bcad\") " Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.081431 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/790a0398-c3e3-4070-9ce3-7ecbf8b2bcad-kube-api-access-7bbtw" (OuterVolumeSpecName: "kube-api-access-7bbtw") pod "790a0398-c3e3-4070-9ce3-7ecbf8b2bcad" (UID: "790a0398-c3e3-4070-9ce3-7ecbf8b2bcad"). InnerVolumeSpecName "kube-api-access-7bbtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.177373 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bbtw\" (UniqueName: \"kubernetes.io/projected/790a0398-c3e3-4070-9ce3-7ecbf8b2bcad-kube-api-access-7bbtw\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.840278 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.842552 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.861410 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.942600 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.957467 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.971784 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 06:02:43 crc kubenswrapper[4810]: E1203 06:02:43.972540 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790a0398-c3e3-4070-9ce3-7ecbf8b2bcad" containerName="kube-state-metrics" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.972570 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="790a0398-c3e3-4070-9ce3-7ecbf8b2bcad" containerName="kube-state-metrics" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.972892 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="790a0398-c3e3-4070-9ce3-7ecbf8b2bcad" containerName="kube-state-metrics" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.973983 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.975900 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.976201 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 03 06:02:43 crc kubenswrapper[4810]: I1203 06:02:43.982647 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.081940 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79785b5f-skdbm"] Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.084039 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.108793 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79785b5f-skdbm"] Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109124 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-config\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109225 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-svc\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109334 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109371 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b88c9\" (UniqueName: \"kubernetes.io/projected/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-kube-api-access-b88c9\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109402 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109432 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-nb\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109459 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsjst\" (UniqueName: \"kubernetes.io/projected/8d045016-8932-4293-9f53-71663d354934-kube-api-access-lsjst\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109537 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109599 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-swift-storage-0\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.109652 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-sb\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214392 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214451 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-nb\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214471 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsjst\" (UniqueName: \"kubernetes.io/projected/8d045016-8932-4293-9f53-71663d354934-kube-api-access-lsjst\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214503 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214542 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-swift-storage-0\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214579 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-sb\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214636 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-config\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214692 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-svc\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214773 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.214809 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b88c9\" (UniqueName: \"kubernetes.io/projected/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-kube-api-access-b88c9\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.216424 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-swift-storage-0\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.216584 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-config\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.217051 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-nb\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.217645 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-sb\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.221971 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.223690 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-svc\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.226493 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.243575 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8d045016-8932-4293-9f53-71663d354934-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.247327 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b88c9\" (UniqueName: \"kubernetes.io/projected/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-kube-api-access-b88c9\") pod \"dnsmasq-dns-79785b5f-skdbm\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.260052 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsjst\" (UniqueName: \"kubernetes.io/projected/8d045016-8932-4293-9f53-71663d354934-kube-api-access-lsjst\") pod \"kube-state-metrics-0\" (UID: \"8d045016-8932-4293-9f53-71663d354934\") " pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.296950 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.394998 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="790a0398-c3e3-4070-9ce3-7ecbf8b2bcad" path="/var/lib/kubelet/pods/790a0398-c3e3-4070-9ce3-7ecbf8b2bcad/volumes" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.408274 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.830835 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.839252 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.852791 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8d045016-8932-4293-9f53-71663d354934","Type":"ContainerStarted","Data":"4f1d6a3ce1fca20d6c01a6f7e8fb9f2791c357d45cfc4fc0ff1833ef68d87e61"} Dec 03 06:02:44 crc kubenswrapper[4810]: I1203 06:02:44.981057 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79785b5f-skdbm"] Dec 03 06:02:44 crc kubenswrapper[4810]: W1203 06:02:44.986622 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19bdbb15_10b3_4eb4_8e7f_ffe5d8fa2180.slice/crio-cfb07630bd51d6d658d49223304d899855bf419981f5dcc73d2e1f51eef235a5 WatchSource:0}: Error finding container cfb07630bd51d6d658d49223304d899855bf419981f5dcc73d2e1f51eef235a5: Status 404 returned error can't find the container with id cfb07630bd51d6d658d49223304d899855bf419981f5dcc73d2e1f51eef235a5 Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.493283 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.494426 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="ceilometer-central-agent" containerID="cri-o://bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c" gracePeriod=30 Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.494722 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="proxy-httpd" containerID="cri-o://1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6" gracePeriod=30 Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.494880 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="sg-core" containerID="cri-o://ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a" gracePeriod=30 Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.495003 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="ceilometer-notification-agent" containerID="cri-o://bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103" gracePeriod=30 Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.866132 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8d045016-8932-4293-9f53-71663d354934","Type":"ContainerStarted","Data":"711914048ea9f4cdd19597eca5a533b99739bf70b03c8e3281fe1ae3bd619a3c"} Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.867484 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.869298 4810 generic.go:334] "Generic (PLEG): container finished" 
podID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerID="0d06bdc0a3823df2fe72e6b526e030e90514dd22b59d171bd1ce4223f53c3584" exitCode=0 Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.869425 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79785b5f-skdbm" event={"ID":"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180","Type":"ContainerDied","Data":"0d06bdc0a3823df2fe72e6b526e030e90514dd22b59d171bd1ce4223f53c3584"} Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.869499 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79785b5f-skdbm" event={"ID":"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180","Type":"ContainerStarted","Data":"cfb07630bd51d6d658d49223304d899855bf419981f5dcc73d2e1f51eef235a5"} Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.881977 4810 generic.go:334] "Generic (PLEG): container finished" podID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerID="1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6" exitCode=0 Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.882024 4810 generic.go:334] "Generic (PLEG): container finished" podID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerID="ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a" exitCode=2 Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.882886 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerDied","Data":"1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6"} Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.882999 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerDied","Data":"ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a"} Dec 03 06:02:45 crc kubenswrapper[4810]: I1203 06:02:45.896398 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.515948947 podStartE2EDuration="2.896368518s" podCreationTimestamp="2025-12-03 06:02:43 +0000 UTC" firstStartedPulling="2025-12-03 06:02:44.83891934 +0000 UTC m=+1288.774380191" lastFinishedPulling="2025-12-03 06:02:45.219338931 +0000 UTC m=+1289.154799762" observedRunningTime="2025-12-03 06:02:45.885908492 +0000 UTC m=+1289.821369333" watchObservedRunningTime="2025-12-03 06:02:45.896368518 +0000 UTC m=+1289.831829359" Dec 03 06:02:46 crc kubenswrapper[4810]: I1203 06:02:46.275861 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:46 crc kubenswrapper[4810]: I1203 06:02:46.896317 4810 generic.go:334] "Generic (PLEG): container finished" podID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerID="bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c" exitCode=0 Dec 03 06:02:46 crc kubenswrapper[4810]: I1203 06:02:46.896403 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerDied","Data":"bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c"} Dec 03 06:02:46 crc kubenswrapper[4810]: I1203 06:02:46.901323 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79785b5f-skdbm" event={"ID":"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180","Type":"ContainerStarted","Data":"d71cf6b485e138d3d7e7e1ceb87a65ea1f85faae53fa3ecf910ddad7674c3230"} Dec 03 06:02:46 crc 
kubenswrapper[4810]: I1203 06:02:46.932912 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79785b5f-skdbm" podStartSLOduration=2.932884605 podStartE2EDuration="2.932884605s" podCreationTimestamp="2025-12-03 06:02:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:46.92282979 +0000 UTC m=+1290.858290641" watchObservedRunningTime="2025-12-03 06:02:46.932884605 +0000 UTC m=+1290.868345446" Dec 03 06:02:47 crc kubenswrapper[4810]: I1203 06:02:47.124640 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:47 crc kubenswrapper[4810]: I1203 06:02:47.125010 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-log" containerID="cri-o://db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473" gracePeriod=30 Dec 03 06:02:47 crc kubenswrapper[4810]: I1203 06:02:47.125143 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-api" containerID="cri-o://f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b" gracePeriod=30 Dec 03 06:02:47 crc kubenswrapper[4810]: I1203 06:02:47.915256 4810 generic.go:334] "Generic (PLEG): container finished" podID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerID="db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473" exitCode=143 Dec 03 06:02:47 crc kubenswrapper[4810]: I1203 06:02:47.915355 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3","Type":"ContainerDied","Data":"db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473"} Dec 03 06:02:47 crc kubenswrapper[4810]: I1203 06:02:47.916289 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:50 crc kubenswrapper[4810]: I1203 06:02:50.837529 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:50 crc kubenswrapper[4810]: I1203 06:02:50.963202 4810 generic.go:334] "Generic (PLEG): container finished" podID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerID="f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b" exitCode=0 Dec 03 06:02:50 crc kubenswrapper[4810]: I1203 06:02:50.963838 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3","Type":"ContainerDied","Data":"f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b"} Dec 03 06:02:50 crc kubenswrapper[4810]: I1203 06:02:50.963896 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3","Type":"ContainerDied","Data":"7587b796c6c003370f2c54523fc07c95b5d7879b43571bb723d414a17f8c1b4c"} Dec 03 06:02:50 crc kubenswrapper[4810]: I1203 06:02:50.963932 4810 scope.go:117] "RemoveContainer" containerID="f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b" Dec 03 06:02:50 crc kubenswrapper[4810]: I1203 06:02:50.964162 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:50 crc kubenswrapper[4810]: I1203 06:02:50.996899 4810 scope.go:117] "RemoveContainer" containerID="db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.021148 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98v44\" (UniqueName: \"kubernetes.io/projected/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-kube-api-access-98v44\") pod \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.021667 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-logs\") pod \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.021975 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-combined-ca-bundle\") pod \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.022006 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-logs" (OuterVolumeSpecName: "logs") pod "b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" (UID: "b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.022066 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-config-data\") pod \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\" (UID: \"b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3\") " Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.023821 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.028672 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-kube-api-access-98v44" (OuterVolumeSpecName: "kube-api-access-98v44") pod "b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" (UID: "b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3"). InnerVolumeSpecName "kube-api-access-98v44". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.029001 4810 scope.go:117] "RemoveContainer" containerID="f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b" Dec 03 06:02:51 crc kubenswrapper[4810]: E1203 06:02:51.029604 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b\": container with ID starting with f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b not found: ID does not exist" containerID="f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.029670 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b"} err="failed to get container status \"f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b\": rpc error: code = NotFound desc = could not find container \"f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b\": container with ID starting with f2e92be7ee3786d860a49970e6a2f9e2a71ed347d924e6adcba1b898026bca6b not found: ID does not exist" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.029711 4810 scope.go:117] "RemoveContainer" containerID="db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473" Dec 03 06:02:51 crc kubenswrapper[4810]: E1203 06:02:51.030385 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473\": container with ID starting with db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473 not found: ID does not exist" containerID="db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.030420 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473"} err="failed to get container status \"db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473\": rpc error: code = NotFound desc = could not find container \"db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473\": container with ID starting with db7c49ed851506d2abefcf52a721f0f133dd85b132747527ea0660068f960473 not found: ID does not exist" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.052025 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" (UID: "b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.062263 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-config-data" (OuterVolumeSpecName: "config-data") pod "b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" (UID: "b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.125723 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98v44\" (UniqueName: \"kubernetes.io/projected/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-kube-api-access-98v44\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.125783 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.125796 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.275979 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.296651 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.302002 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.308392 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.329076 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:51 crc kubenswrapper[4810]: E1203 06:02:51.329592 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-api" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.329613 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-api" Dec 03 06:02:51 crc kubenswrapper[4810]: E1203 06:02:51.329626 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-log" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.329635 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-log" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.329844 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-api" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.329874 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" containerName="nova-api-log" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.330988 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.336462 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.336980 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.341987 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.346883 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.431530 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.431635 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl24w\" (UniqueName: \"kubernetes.io/projected/eb279876-b9b7-4958-8bd0-5a1a2779fa16-kube-api-access-cl24w\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.432018 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb279876-b9b7-4958-8bd0-5a1a2779fa16-logs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.432159 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-public-tls-certs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.432848 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-config-data\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.433075 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.535384 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.535466 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-combined-ca-bundle\") pod 
\"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.535506 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl24w\" (UniqueName: \"kubernetes.io/projected/eb279876-b9b7-4958-8bd0-5a1a2779fa16-kube-api-access-cl24w\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.535553 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb279876-b9b7-4958-8bd0-5a1a2779fa16-logs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.535582 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-public-tls-certs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.535632 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-config-data\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.537057 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb279876-b9b7-4958-8bd0-5a1a2779fa16-logs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.542150 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-public-tls-certs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.543227 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.548866 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.557968 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-config-data\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.564482 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl24w\" (UniqueName: \"kubernetes.io/projected/eb279876-b9b7-4958-8bd0-5a1a2779fa16-kube-api-access-cl24w\") pod \"nova-api-0\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " 
pod="openstack/nova-api-0" Dec 03 06:02:51 crc kubenswrapper[4810]: I1203 06:02:51.647166 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.014477 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.152340 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.247434 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-f9dsl"] Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.249433 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.255406 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.255550 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.258981 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-f9dsl"] Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.351445 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbztz\" (UniqueName: \"kubernetes.io/projected/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-kube-api-access-jbztz\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.351838 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-scripts\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.351904 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-config-data\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.351982 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.404577 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3" path="/var/lib/kubelet/pods/b5c50c2d-33ef-4a05-a8ba-1bee99e4f6b3/volumes" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.454359 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-config-data\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: 
\"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.454472 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.454511 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbztz\" (UniqueName: \"kubernetes.io/projected/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-kube-api-access-jbztz\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.454566 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-scripts\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.459477 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.459845 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-scripts\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.461148 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-config-data\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.473803 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbztz\" (UniqueName: \"kubernetes.io/projected/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-kube-api-access-jbztz\") pod \"nova-cell1-cell-mapping-f9dsl\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.605809 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.994721 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb279876-b9b7-4958-8bd0-5a1a2779fa16","Type":"ContainerStarted","Data":"051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57"} Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.995190 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb279876-b9b7-4958-8bd0-5a1a2779fa16","Type":"ContainerStarted","Data":"bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46"} Dec 03 06:02:52 crc kubenswrapper[4810]: I1203 06:02:52.995207 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb279876-b9b7-4958-8bd0-5a1a2779fa16","Type":"ContainerStarted","Data":"f8de4c745934e66e435537af4518805afb3fb4f4bededb4e27fdf29bc3946429"} Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.161041 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.161009951 podStartE2EDuration="2.161009951s" podCreationTimestamp="2025-12-03 06:02:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:53.02151338 +0000 UTC m=+1296.956974261" watchObservedRunningTime="2025-12-03 06:02:53.161009951 +0000 UTC m=+1297.096470812" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.170478 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-f9dsl"] Dec 03 06:02:53 crc kubenswrapper[4810]: W1203 06:02:53.234892 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e60e2fe_0566_4bc6_bfaf_f1bb33e2216c.slice/crio-b137722f920cd03c1068bbd9640bbb2ba6fb8d2e386d1947b8cd07f0100b045b WatchSource:0}: Error finding container b137722f920cd03c1068bbd9640bbb2ba6fb8d2e386d1947b8cd07f0100b045b: Status 404 returned error can't find the container with id b137722f920cd03c1068bbd9640bbb2ba6fb8d2e386d1947b8cd07f0100b045b Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.650914 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.803798 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-log-httpd\") pod \"204e9c97-7971-463d-8674-3f2cf51a4a97\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.804344 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-combined-ca-bundle\") pod \"204e9c97-7971-463d-8674-3f2cf51a4a97\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.804547 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrr69\" (UniqueName: \"kubernetes.io/projected/204e9c97-7971-463d-8674-3f2cf51a4a97-kube-api-access-mrr69\") pod \"204e9c97-7971-463d-8674-3f2cf51a4a97\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.804598 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-run-httpd\") pod \"204e9c97-7971-463d-8674-3f2cf51a4a97\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.804628 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-config-data\") pod \"204e9c97-7971-463d-8674-3f2cf51a4a97\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.804680 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-scripts\") pod \"204e9c97-7971-463d-8674-3f2cf51a4a97\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.804704 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-sg-core-conf-yaml\") pod \"204e9c97-7971-463d-8674-3f2cf51a4a97\" (UID: \"204e9c97-7971-463d-8674-3f2cf51a4a97\") " Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.813091 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/204e9c97-7971-463d-8674-3f2cf51a4a97-kube-api-access-mrr69" (OuterVolumeSpecName: "kube-api-access-mrr69") pod "204e9c97-7971-463d-8674-3f2cf51a4a97" (UID: "204e9c97-7971-463d-8674-3f2cf51a4a97"). InnerVolumeSpecName "kube-api-access-mrr69". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.813555 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "204e9c97-7971-463d-8674-3f2cf51a4a97" (UID: "204e9c97-7971-463d-8674-3f2cf51a4a97"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.816777 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "204e9c97-7971-463d-8674-3f2cf51a4a97" (UID: "204e9c97-7971-463d-8674-3f2cf51a4a97"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.820539 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-scripts" (OuterVolumeSpecName: "scripts") pod "204e9c97-7971-463d-8674-3f2cf51a4a97" (UID: "204e9c97-7971-463d-8674-3f2cf51a4a97"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.861379 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "204e9c97-7971-463d-8674-3f2cf51a4a97" (UID: "204e9c97-7971-463d-8674-3f2cf51a4a97"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.908653 4810 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.908690 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrr69\" (UniqueName: \"kubernetes.io/projected/204e9c97-7971-463d-8674-3f2cf51a4a97-kube-api-access-mrr69\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.908705 4810 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/204e9c97-7971-463d-8674-3f2cf51a4a97-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.908715 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.908723 4810 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.925507 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "204e9c97-7971-463d-8674-3f2cf51a4a97" (UID: "204e9c97-7971-463d-8674-3f2cf51a4a97"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:53 crc kubenswrapper[4810]: I1203 06:02:53.956009 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-config-data" (OuterVolumeSpecName: "config-data") pod "204e9c97-7971-463d-8674-3f2cf51a4a97" (UID: "204e9c97-7971-463d-8674-3f2cf51a4a97"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.004874 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-f9dsl" event={"ID":"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c","Type":"ContainerStarted","Data":"9cac7fa0e9c4445c5e6a45e87197a4fa2f6c3a5d8f1e803c7de595cc29e792f3"} Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.004932 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-f9dsl" event={"ID":"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c","Type":"ContainerStarted","Data":"b137722f920cd03c1068bbd9640bbb2ba6fb8d2e386d1947b8cd07f0100b045b"} Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.008555 4810 generic.go:334] "Generic (PLEG): container finished" podID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerID="bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103" exitCode=0 Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.008651 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerDied","Data":"bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103"} Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.008703 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.008753 4810 scope.go:117] "RemoveContainer" containerID="1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.008724 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"204e9c97-7971-463d-8674-3f2cf51a4a97","Type":"ContainerDied","Data":"464ffe44d3ed9841e552f8cf9766d94208dc7b6681dc66fa118fc0e6d79e9876"} Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.010241 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.010263 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204e9c97-7971-463d-8674-3f2cf51a4a97-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.035165 4810 scope.go:117] "RemoveContainer" containerID="ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.035274 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-f9dsl" podStartSLOduration=2.035244387 podStartE2EDuration="2.035244387s" podCreationTimestamp="2025-12-03 06:02:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:02:54.02814757 +0000 UTC m=+1297.963608411" watchObservedRunningTime="2025-12-03 06:02:54.035244387 +0000 UTC m=+1297.970705228" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.067595 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.068500 4810 scope.go:117] "RemoveContainer" containerID="bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 
06:02:54.086519 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.108686 4810 scope.go:117] "RemoveContainer" containerID="bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.110514 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:54 crc kubenswrapper[4810]: E1203 06:02:54.111251 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="proxy-httpd" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.111286 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="proxy-httpd" Dec 03 06:02:54 crc kubenswrapper[4810]: E1203 06:02:54.111326 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="ceilometer-central-agent" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.111339 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="ceilometer-central-agent" Dec 03 06:02:54 crc kubenswrapper[4810]: E1203 06:02:54.111388 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="sg-core" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.111401 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="sg-core" Dec 03 06:02:54 crc kubenswrapper[4810]: E1203 06:02:54.111436 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="ceilometer-notification-agent" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.111448 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="ceilometer-notification-agent" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.111790 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="ceilometer-central-agent" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.111831 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="proxy-httpd" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.111865 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="sg-core" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.111892 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" containerName="ceilometer-notification-agent" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.115288 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.119306 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.119522 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.119674 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.128522 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.164956 4810 scope.go:117] "RemoveContainer" containerID="1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6" Dec 03 06:02:54 crc kubenswrapper[4810]: E1203 06:02:54.165628 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6\": container with ID starting with 1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6 not found: ID does not exist" containerID="1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.165660 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6"} err="failed to get container status \"1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6\": rpc error: code = NotFound desc = could not find container \"1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6\": container with ID starting with 1256360b68c22fc367089d3ca5344ded01fe10003e2caea5eb8b41a1fd9e10e6 not found: ID does not exist" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.165688 4810 scope.go:117] "RemoveContainer" containerID="ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a" Dec 03 06:02:54 crc kubenswrapper[4810]: E1203 06:02:54.166257 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a\": container with ID starting with ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a not found: ID does not exist" containerID="ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.166280 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a"} err="failed to get container status \"ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a\": rpc error: code = NotFound desc = could not find container \"ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a\": container with ID starting with ff6aedba9160bddbe6c097241a0471c8de0272e1ea7ebe17960592a2a05b0f3a not found: ID does not exist" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.166297 4810 scope.go:117] "RemoveContainer" containerID="bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103" Dec 03 06:02:54 crc kubenswrapper[4810]: E1203 06:02:54.166799 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103\": container with ID starting with bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103 not found: ID does not exist" containerID="bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.166824 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103"} err="failed to get container status \"bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103\": rpc error: code = NotFound desc = could not find container \"bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103\": container with ID starting with bc0b20ebcb23f9e671c0c10d969ba94aa274e75b1de5b6bb3448456eb6e3d103 not found: ID does not exist" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.166839 4810 scope.go:117] "RemoveContainer" containerID="bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c" Dec 03 06:02:54 crc kubenswrapper[4810]: E1203 06:02:54.167162 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c\": container with ID starting with bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c not found: ID does not exist" containerID="bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.167188 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c"} err="failed to get container status \"bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c\": rpc error: code = NotFound desc = could not find container \"bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c\": container with ID starting with bcd7585715783ffdbd9c41b6d21a4d3a244bf13629c425d64ac52dfb3800566c not found: ID does not exist" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.216084 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.216167 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-log-httpd\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.216202 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.216489 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-scripts\") pod \"ceilometer-0\" (UID: 
\"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.216544 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zlds\" (UniqueName: \"kubernetes.io/projected/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-kube-api-access-4zlds\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.216653 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.216720 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-config-data\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.216762 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-run-httpd\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.313286 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.318511 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-config-data\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.318564 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-run-httpd\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.318670 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.318722 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-log-httpd\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.318817 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc 
kubenswrapper[4810]: I1203 06:02:54.318855 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-scripts\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.318884 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zlds\" (UniqueName: \"kubernetes.io/projected/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-kube-api-access-4zlds\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.318935 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.319670 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-log-httpd\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.319914 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-run-httpd\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.324081 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.325018 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.326442 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-scripts\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.333299 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.341588 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zlds\" (UniqueName: \"kubernetes.io/projected/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-kube-api-access-4zlds\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.352539 
4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e439d29-4442-41b5-94ba-a8fb7f77d5f0-config-data\") pod \"ceilometer-0\" (UID: \"8e439d29-4442-41b5-94ba-a8fb7f77d5f0\") " pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.409493 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="204e9c97-7971-463d-8674-3f2cf51a4a97" path="/var/lib/kubelet/pods/204e9c97-7971-463d-8674-3f2cf51a4a97/volumes" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.411069 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.450501 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.505660 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74cfbb9557-lp28d"] Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.505969 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" podUID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerName="dnsmasq-dns" containerID="cri-o://08d78a127662ee8894f99424aa6413dc00b552a6e0b6ceb6e9760a083026c269" gracePeriod=10 Dec 03 06:02:54 crc kubenswrapper[4810]: I1203 06:02:54.534719 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" podUID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.183:5353: connect: connection refused" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.005527 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.039639 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e439d29-4442-41b5-94ba-a8fb7f77d5f0","Type":"ContainerStarted","Data":"50633e494dd5a5ade836d162f1ddfbc9fc0efbd20eb07cef0730c1e4352d401b"} Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.045278 4810 generic.go:334] "Generic (PLEG): container finished" podID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerID="08d78a127662ee8894f99424aa6413dc00b552a6e0b6ceb6e9760a083026c269" exitCode=0 Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.045345 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" event={"ID":"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314","Type":"ContainerDied","Data":"08d78a127662ee8894f99424aa6413dc00b552a6e0b6ceb6e9760a083026c269"} Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.179810 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.345137 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-sb\") pod \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.346271 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-svc\") pod \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.346638 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-nb\") pod \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.346774 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zd8ds\" (UniqueName: \"kubernetes.io/projected/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-kube-api-access-zd8ds\") pod \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.346918 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-swift-storage-0\") pod \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.347008 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-config\") pod \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\" (UID: \"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314\") " Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.354095 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-kube-api-access-zd8ds" (OuterVolumeSpecName: "kube-api-access-zd8ds") pod "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" (UID: "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314"). InnerVolumeSpecName "kube-api-access-zd8ds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.410969 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" (UID: "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.439122 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" (UID: "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.445422 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" (UID: "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.450959 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.451004 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.451018 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.451033 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zd8ds\" (UniqueName: \"kubernetes.io/projected/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-kube-api-access-zd8ds\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.452912 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-config" (OuterVolumeSpecName: "config") pod "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" (UID: "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.462000 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" (UID: "0e6ce4c8-496c-49bc-a0b0-d4ac129f3314"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.553350 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:55 crc kubenswrapper[4810]: I1203 06:02:55.553392 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.069414 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" event={"ID":"0e6ce4c8-496c-49bc-a0b0-d4ac129f3314","Type":"ContainerDied","Data":"2514345e6d86e9e562e76909f0c8d8ac915fd44ffa70c8daec41a7263c4d8d80"} Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.069479 4810 scope.go:117] "RemoveContainer" containerID="08d78a127662ee8894f99424aa6413dc00b552a6e0b6ceb6e9760a083026c269" Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.069672 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74cfbb9557-lp28d" Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.079522 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e439d29-4442-41b5-94ba-a8fb7f77d5f0","Type":"ContainerStarted","Data":"92645fd6c8c1d2056d1eeee0a9180c3883e97c96361f25d4e0a573be7733ab9b"} Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.079552 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e439d29-4442-41b5-94ba-a8fb7f77d5f0","Type":"ContainerStarted","Data":"49477451cefd207a31c9e5a87331ee42b1b88ad6919b3ddd71a6c43409abe8f8"} Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.110079 4810 scope.go:117] "RemoveContainer" containerID="ecf6347a1ecd086e444627a27ad6f527d6afca2b7b3b72e7ca28d15c7aacc3e0" Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.118922 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74cfbb9557-lp28d"] Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.130171 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74cfbb9557-lp28d"] Dec 03 06:02:56 crc kubenswrapper[4810]: I1203 06:02:56.397222 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" path="/var/lib/kubelet/pods/0e6ce4c8-496c-49bc-a0b0-d4ac129f3314/volumes" Dec 03 06:02:57 crc kubenswrapper[4810]: I1203 06:02:57.094073 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e439d29-4442-41b5-94ba-a8fb7f77d5f0","Type":"ContainerStarted","Data":"b562e714526d7da8272e65e6679f8bf4414337b199ea9f55202cb7af0d9ddb87"} Dec 03 06:02:59 crc kubenswrapper[4810]: I1203 06:02:59.142727 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e439d29-4442-41b5-94ba-a8fb7f77d5f0","Type":"ContainerStarted","Data":"1d11a63e455f3e2e032159310f9d3cc4a96932818870ad141ee5a9680d1cb318"} Dec 03 06:02:59 crc kubenswrapper[4810]: I1203 06:02:59.143646 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 06:02:59 crc kubenswrapper[4810]: I1203 06:02:59.198380 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" 
podStartSLOduration=2.340645593 podStartE2EDuration="5.198350439s" podCreationTimestamp="2025-12-03 06:02:54 +0000 UTC" firstStartedPulling="2025-12-03 06:02:55.007542967 +0000 UTC m=+1298.943003818" lastFinishedPulling="2025-12-03 06:02:57.865247823 +0000 UTC m=+1301.800708664" observedRunningTime="2025-12-03 06:02:59.171167724 +0000 UTC m=+1303.106628595" watchObservedRunningTime="2025-12-03 06:02:59.198350439 +0000 UTC m=+1303.133811310" Dec 03 06:03:00 crc kubenswrapper[4810]: I1203 06:03:00.159084 4810 generic.go:334] "Generic (PLEG): container finished" podID="5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" containerID="9cac7fa0e9c4445c5e6a45e87197a4fa2f6c3a5d8f1e803c7de595cc29e792f3" exitCode=0 Dec 03 06:03:00 crc kubenswrapper[4810]: I1203 06:03:00.160544 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-f9dsl" event={"ID":"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c","Type":"ContainerDied","Data":"9cac7fa0e9c4445c5e6a45e87197a4fa2f6c3a5d8f1e803c7de595cc29e792f3"} Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.647989 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.649013 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.692955 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.868618 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-scripts\") pod \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.869311 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbztz\" (UniqueName: \"kubernetes.io/projected/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-kube-api-access-jbztz\") pod \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.869548 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-combined-ca-bundle\") pod \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.869640 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-config-data\") pod \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\" (UID: \"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c\") " Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.888664 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-scripts" (OuterVolumeSpecName: "scripts") pod "5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" (UID: "5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.888932 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-kube-api-access-jbztz" (OuterVolumeSpecName: "kube-api-access-jbztz") pod "5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" (UID: "5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c"). InnerVolumeSpecName "kube-api-access-jbztz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.915162 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-config-data" (OuterVolumeSpecName: "config-data") pod "5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" (UID: "5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.951344 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" (UID: "5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.973679 4810 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.973716 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbztz\" (UniqueName: \"kubernetes.io/projected/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-kube-api-access-jbztz\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.973743 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:01 crc kubenswrapper[4810]: I1203 06:03:01.973754 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.199079 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-f9dsl" event={"ID":"5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c","Type":"ContainerDied","Data":"b137722f920cd03c1068bbd9640bbb2ba6fb8d2e386d1947b8cd07f0100b045b"} Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.199130 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b137722f920cd03c1068bbd9640bbb2ba6fb8d2e386d1947b8cd07f0100b045b" Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.199165 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-f9dsl" Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.408123 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.408346 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-log" containerID="cri-o://bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46" gracePeriod=30 Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.408703 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-api" containerID="cri-o://051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57" gracePeriod=30 Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.412154 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.412423 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" containerName="nova-scheduler-scheduler" containerID="cri-o://068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d" gracePeriod=30 Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.435200 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.195:8774/\": EOF" Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.435231 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.195:8774/\": EOF" Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.435297 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.435597 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-log" containerID="cri-o://1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a" gracePeriod=30 Dec 03 06:03:02 crc kubenswrapper[4810]: I1203 06:03:02.436123 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-metadata" containerID="cri-o://bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32" gracePeriod=30 Dec 03 06:03:03 crc kubenswrapper[4810]: I1203 06:03:03.217424 4810 generic.go:334] "Generic (PLEG): container finished" podID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerID="bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46" exitCode=143 Dec 03 06:03:03 crc kubenswrapper[4810]: I1203 06:03:03.217494 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb279876-b9b7-4958-8bd0-5a1a2779fa16","Type":"ContainerDied","Data":"bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46"} Dec 03 06:03:03 crc kubenswrapper[4810]: I1203 06:03:03.222123 4810 generic.go:334] "Generic (PLEG): container finished" 
podID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerID="1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a" exitCode=143 Dec 03 06:03:03 crc kubenswrapper[4810]: I1203 06:03:03.222170 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0442d702-9c21-4eee-9f8c-7b1224adb4a7","Type":"ContainerDied","Data":"1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a"} Dec 03 06:03:05 crc kubenswrapper[4810]: I1203 06:03:05.586685 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.188:8775/\": read tcp 10.217.0.2:46058->10.217.0.188:8775: read: connection reset by peer" Dec 03 06:03:05 crc kubenswrapper[4810]: I1203 06:03:05.587930 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.188:8775/\": read tcp 10.217.0.2:46064->10.217.0.188:8775: read: connection reset by peer" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.138577 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.179429 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-nova-metadata-tls-certs\") pod \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.179581 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-combined-ca-bundle\") pod \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.179631 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0442d702-9c21-4eee-9f8c-7b1224adb4a7-logs\") pod \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.182689 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0442d702-9c21-4eee-9f8c-7b1224adb4a7-logs" (OuterVolumeSpecName: "logs") pod "0442d702-9c21-4eee-9f8c-7b1224adb4a7" (UID: "0442d702-9c21-4eee-9f8c-7b1224adb4a7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.233554 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0442d702-9c21-4eee-9f8c-7b1224adb4a7" (UID: "0442d702-9c21-4eee-9f8c-7b1224adb4a7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.264478 4810 generic.go:334] "Generic (PLEG): container finished" podID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerID="bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32" exitCode=0 Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.264540 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0442d702-9c21-4eee-9f8c-7b1224adb4a7","Type":"ContainerDied","Data":"bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32"} Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.264587 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0442d702-9c21-4eee-9f8c-7b1224adb4a7","Type":"ContainerDied","Data":"3e51569c6a27fc567c51accdcddba6eed5fbb10699e6665a2547b1d3f3b9cfe3"} Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.264621 4810 scope.go:117] "RemoveContainer" containerID="bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.265136 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "0442d702-9c21-4eee-9f8c-7b1224adb4a7" (UID: "0442d702-9c21-4eee-9f8c-7b1224adb4a7"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.265471 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.284075 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl7dh\" (UniqueName: \"kubernetes.io/projected/0442d702-9c21-4eee-9f8c-7b1224adb4a7-kube-api-access-rl7dh\") pod \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.284204 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-config-data\") pod \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\" (UID: \"0442d702-9c21-4eee-9f8c-7b1224adb4a7\") " Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.285644 4810 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.285697 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.285716 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0442d702-9c21-4eee-9f8c-7b1224adb4a7-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.290479 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0442d702-9c21-4eee-9f8c-7b1224adb4a7-kube-api-access-rl7dh" (OuterVolumeSpecName: "kube-api-access-rl7dh") pod 
"0442d702-9c21-4eee-9f8c-7b1224adb4a7" (UID: "0442d702-9c21-4eee-9f8c-7b1224adb4a7"). InnerVolumeSpecName "kube-api-access-rl7dh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.330665 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-config-data" (OuterVolumeSpecName: "config-data") pod "0442d702-9c21-4eee-9f8c-7b1224adb4a7" (UID: "0442d702-9c21-4eee-9f8c-7b1224adb4a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.334349 4810 scope.go:117] "RemoveContainer" containerID="1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.362636 4810 scope.go:117] "RemoveContainer" containerID="bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32" Dec 03 06:03:06 crc kubenswrapper[4810]: E1203 06:03:06.363326 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32\": container with ID starting with bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32 not found: ID does not exist" containerID="bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.363370 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32"} err="failed to get container status \"bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32\": rpc error: code = NotFound desc = could not find container \"bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32\": container with ID starting with bd4cc5a8f0ae55313588f90cc58e584624dbb73fbc8700bb27bdff5b0dd00c32 not found: ID does not exist" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.363403 4810 scope.go:117] "RemoveContainer" containerID="1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a" Dec 03 06:03:06 crc kubenswrapper[4810]: E1203 06:03:06.366699 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a\": container with ID starting with 1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a not found: ID does not exist" containerID="1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.366764 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a"} err="failed to get container status \"1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a\": rpc error: code = NotFound desc = could not find container \"1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a\": container with ID starting with 1010cb42052872fc2f8d84b32bf7ffda05d096001b61eb65d680c784fa77525a not found: ID does not exist" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.389045 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl7dh\" (UniqueName: \"kubernetes.io/projected/0442d702-9c21-4eee-9f8c-7b1224adb4a7-kube-api-access-rl7dh\") on node \"crc\" DevicePath \"\"" Dec 
03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.389084 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0442d702-9c21-4eee-9f8c-7b1224adb4a7-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.608866 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.628274 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.637366 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:03:06 crc kubenswrapper[4810]: E1203 06:03:06.638004 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-log" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638021 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-log" Dec 03 06:03:06 crc kubenswrapper[4810]: E1203 06:03:06.638051 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-metadata" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638057 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-metadata" Dec 03 06:03:06 crc kubenswrapper[4810]: E1203 06:03:06.638068 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" containerName="nova-manage" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638074 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" containerName="nova-manage" Dec 03 06:03:06 crc kubenswrapper[4810]: E1203 06:03:06.638087 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerName="dnsmasq-dns" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638094 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerName="dnsmasq-dns" Dec 03 06:03:06 crc kubenswrapper[4810]: E1203 06:03:06.638122 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerName="init" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638127 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerName="init" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638349 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-metadata" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638378 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" containerName="nova-metadata-log" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638388 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6ce4c8-496c-49bc-a0b0-d4ac129f3314" containerName="dnsmasq-dns" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.638404 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" containerName="nova-manage" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.639840 4810 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.643002 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.643159 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.655909 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.800364 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-config-data\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.800453 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.800706 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.800785 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75e0323d-ca57-4a7e-a883-35da97b7e9d7-logs\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.800832 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qshft\" (UniqueName: \"kubernetes.io/projected/75e0323d-ca57-4a7e-a883-35da97b7e9d7-kube-api-access-qshft\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.851957 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.902933 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.903014 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75e0323d-ca57-4a7e-a883-35da97b7e9d7-logs\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.903055 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qshft\" (UniqueName: \"kubernetes.io/projected/75e0323d-ca57-4a7e-a883-35da97b7e9d7-kube-api-access-qshft\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.903137 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-config-data\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.903163 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.904341 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75e0323d-ca57-4a7e-a883-35da97b7e9d7-logs\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.912953 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.922163 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.925055 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75e0323d-ca57-4a7e-a883-35da97b7e9d7-config-data\") pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.925628 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qshft\" (UniqueName: \"kubernetes.io/projected/75e0323d-ca57-4a7e-a883-35da97b7e9d7-kube-api-access-qshft\") 
pod \"nova-metadata-0\" (UID: \"75e0323d-ca57-4a7e-a883-35da97b7e9d7\") " pod="openstack/nova-metadata-0" Dec 03 06:03:06 crc kubenswrapper[4810]: I1203 06:03:06.976647 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.005966 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vchrg\" (UniqueName: \"kubernetes.io/projected/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-kube-api-access-vchrg\") pod \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.006804 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-combined-ca-bundle\") pod \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.006894 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-config-data\") pod \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\" (UID: \"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5\") " Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.011968 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-kube-api-access-vchrg" (OuterVolumeSpecName: "kube-api-access-vchrg") pod "baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" (UID: "baa66b65-3bb9-42a1-be0f-7dd21cc0efb5"). InnerVolumeSpecName "kube-api-access-vchrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.050104 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" (UID: "baa66b65-3bb9-42a1-be0f-7dd21cc0efb5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.050302 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-config-data" (OuterVolumeSpecName: "config-data") pod "baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" (UID: "baa66b65-3bb9-42a1-be0f-7dd21cc0efb5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.110262 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.110304 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.110315 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vchrg\" (UniqueName: \"kubernetes.io/projected/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5-kube-api-access-vchrg\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.278795 4810 generic.go:334] "Generic (PLEG): container finished" podID="baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" containerID="068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d" exitCode=0 Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.278854 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5","Type":"ContainerDied","Data":"068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d"} Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.278916 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"baa66b65-3bb9-42a1-be0f-7dd21cc0efb5","Type":"ContainerDied","Data":"ab09f947074f03a3e489854c8e20215b5d024448735d0c803a4735a87edff75f"} Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.278943 4810 scope.go:117] "RemoveContainer" containerID="068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.280112 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.325460 4810 scope.go:117] "RemoveContainer" containerID="068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d" Dec 03 06:03:07 crc kubenswrapper[4810]: E1203 06:03:07.326847 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d\": container with ID starting with 068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d not found: ID does not exist" containerID="068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.326904 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d"} err="failed to get container status \"068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d\": rpc error: code = NotFound desc = could not find container \"068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d\": container with ID starting with 068be7b7ba0cfa177d96a9348f0939c123c99172b589b086fc8ff4b8d858ae8d not found: ID does not exist" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.339973 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.346387 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.358132 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:03:07 crc kubenswrapper[4810]: E1203 06:03:07.358859 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" containerName="nova-scheduler-scheduler" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.358893 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" containerName="nova-scheduler-scheduler" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.359205 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" containerName="nova-scheduler-scheduler" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.360334 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.365107 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.367848 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.427150 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788c2be7-daf0-4cb1-9d7e-0f351e348603-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.427208 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788c2be7-daf0-4cb1-9d7e-0f351e348603-config-data\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.427310 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssqb6\" (UniqueName: \"kubernetes.io/projected/788c2be7-daf0-4cb1-9d7e-0f351e348603-kube-api-access-ssqb6\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.522496 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.531993 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssqb6\" (UniqueName: \"kubernetes.io/projected/788c2be7-daf0-4cb1-9d7e-0f351e348603-kube-api-access-ssqb6\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.532201 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788c2be7-daf0-4cb1-9d7e-0f351e348603-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.532263 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788c2be7-daf0-4cb1-9d7e-0f351e348603-config-data\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.540525 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788c2be7-daf0-4cb1-9d7e-0f351e348603-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.542105 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788c2be7-daf0-4cb1-9d7e-0f351e348603-config-data\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: 
I1203 06:03:07.548647 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssqb6\" (UniqueName: \"kubernetes.io/projected/788c2be7-daf0-4cb1-9d7e-0f351e348603-kube-api-access-ssqb6\") pod \"nova-scheduler-0\" (UID: \"788c2be7-daf0-4cb1-9d7e-0f351e348603\") " pod="openstack/nova-scheduler-0" Dec 03 06:03:07 crc kubenswrapper[4810]: I1203 06:03:07.687269 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.238472 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 06:03:08 crc kubenswrapper[4810]: W1203 06:03:08.246424 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod788c2be7_daf0_4cb1_9d7e_0f351e348603.slice/crio-f92dcf3c516eb10d1ec5ef1c0274fa5413c97a5aa4ce9e3c5d3e3c7b4e45821c WatchSource:0}: Error finding container f92dcf3c516eb10d1ec5ef1c0274fa5413c97a5aa4ce9e3c5d3e3c7b4e45821c: Status 404 returned error can't find the container with id f92dcf3c516eb10d1ec5ef1c0274fa5413c97a5aa4ce9e3c5d3e3c7b4e45821c Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.275920 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.312746 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"788c2be7-daf0-4cb1-9d7e-0f351e348603","Type":"ContainerStarted","Data":"f92dcf3c516eb10d1ec5ef1c0274fa5413c97a5aa4ce9e3c5d3e3c7b4e45821c"} Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.315920 4810 generic.go:334] "Generic (PLEG): container finished" podID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerID="051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57" exitCode=0 Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.316048 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.316093 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb279876-b9b7-4958-8bd0-5a1a2779fa16","Type":"ContainerDied","Data":"051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57"} Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.316147 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb279876-b9b7-4958-8bd0-5a1a2779fa16","Type":"ContainerDied","Data":"f8de4c745934e66e435537af4518805afb3fb4f4bededb4e27fdf29bc3946429"} Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.316170 4810 scope.go:117] "RemoveContainer" containerID="051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.361298 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75e0323d-ca57-4a7e-a883-35da97b7e9d7","Type":"ContainerStarted","Data":"7bd7285489c70cf161ee6cf3c8bbc33140ce65c24f30b9e09110bd92ecca55d0"} Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.361360 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75e0323d-ca57-4a7e-a883-35da97b7e9d7","Type":"ContainerStarted","Data":"550c529968abd5b135363674642e5b5ed162fac0c7308293d20ccfe130117980"} Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.361381 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75e0323d-ca57-4a7e-a883-35da97b7e9d7","Type":"ContainerStarted","Data":"c71699d472765a18b9163215928696617968edf2c95ce147b4f7e26e75f9eaca"} Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.393254 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.393227784 podStartE2EDuration="2.393227784s" podCreationTimestamp="2025-12-03 06:03:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:03:08.385305415 +0000 UTC m=+1312.320766266" watchObservedRunningTime="2025-12-03 06:03:08.393227784 +0000 UTC m=+1312.328688625" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.395029 4810 scope.go:117] "RemoveContainer" containerID="bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.412599 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0442d702-9c21-4eee-9f8c-7b1224adb4a7" path="/var/lib/kubelet/pods/0442d702-9c21-4eee-9f8c-7b1224adb4a7/volumes" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.413316 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baa66b65-3bb9-42a1-be0f-7dd21cc0efb5" path="/var/lib/kubelet/pods/baa66b65-3bb9-42a1-be0f-7dd21cc0efb5/volumes" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.433062 4810 scope.go:117] "RemoveContainer" containerID="051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57" Dec 03 06:03:08 crc kubenswrapper[4810]: E1203 06:03:08.433714 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57\": container with ID starting with 051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57 not found: ID does not exist" 
containerID="051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.433780 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57"} err="failed to get container status \"051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57\": rpc error: code = NotFound desc = could not find container \"051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57\": container with ID starting with 051d1285316fd0318cc6d28e8b65c3e40e0e022bc56415e0cc4f09625ed25f57 not found: ID does not exist" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.433817 4810 scope.go:117] "RemoveContainer" containerID="bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46" Dec 03 06:03:08 crc kubenswrapper[4810]: E1203 06:03:08.434250 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46\": container with ID starting with bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46 not found: ID does not exist" containerID="bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.434275 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46"} err="failed to get container status \"bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46\": rpc error: code = NotFound desc = could not find container \"bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46\": container with ID starting with bf1a9eb4fb8a556a526bac00aab59d127cf32c7e75e362b37dbf8f564cd1dd46 not found: ID does not exist" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.451928 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-config-data\") pod \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.452146 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-internal-tls-certs\") pod \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.452204 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-public-tls-certs\") pod \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.452948 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl24w\" (UniqueName: \"kubernetes.io/projected/eb279876-b9b7-4958-8bd0-5a1a2779fa16-kube-api-access-cl24w\") pod \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.453022 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-combined-ca-bundle\") pod \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.453150 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb279876-b9b7-4958-8bd0-5a1a2779fa16-logs\") pod \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\" (UID: \"eb279876-b9b7-4958-8bd0-5a1a2779fa16\") " Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.454104 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb279876-b9b7-4958-8bd0-5a1a2779fa16-logs" (OuterVolumeSpecName: "logs") pod "eb279876-b9b7-4958-8bd0-5a1a2779fa16" (UID: "eb279876-b9b7-4958-8bd0-5a1a2779fa16"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.458434 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb279876-b9b7-4958-8bd0-5a1a2779fa16-kube-api-access-cl24w" (OuterVolumeSpecName: "kube-api-access-cl24w") pod "eb279876-b9b7-4958-8bd0-5a1a2779fa16" (UID: "eb279876-b9b7-4958-8bd0-5a1a2779fa16"). InnerVolumeSpecName "kube-api-access-cl24w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.482525 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb279876-b9b7-4958-8bd0-5a1a2779fa16" (UID: "eb279876-b9b7-4958-8bd0-5a1a2779fa16"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.485107 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-config-data" (OuterVolumeSpecName: "config-data") pod "eb279876-b9b7-4958-8bd0-5a1a2779fa16" (UID: "eb279876-b9b7-4958-8bd0-5a1a2779fa16"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.522694 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "eb279876-b9b7-4958-8bd0-5a1a2779fa16" (UID: "eb279876-b9b7-4958-8bd0-5a1a2779fa16"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.556243 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.556278 4810 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.556292 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl24w\" (UniqueName: \"kubernetes.io/projected/eb279876-b9b7-4958-8bd0-5a1a2779fa16-kube-api-access-cl24w\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.556302 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.556311 4810 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb279876-b9b7-4958-8bd0-5a1a2779fa16-logs\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.560767 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "eb279876-b9b7-4958-8bd0-5a1a2779fa16" (UID: "eb279876-b9b7-4958-8bd0-5a1a2779fa16"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.659788 4810 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb279876-b9b7-4958-8bd0-5a1a2779fa16-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.663039 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.690994 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.716722 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 06:03:08 crc kubenswrapper[4810]: E1203 06:03:08.717601 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-log" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.717637 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-log" Dec 03 06:03:08 crc kubenswrapper[4810]: E1203 06:03:08.717686 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-api" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.717703 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-api" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.718163 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-api" Dec 03 06:03:08 crc 
kubenswrapper[4810]: I1203 06:03:08.718206 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" containerName="nova-api-log" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.720184 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.723207 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.723646 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.723935 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.736252 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.760422 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-logs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.760485 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-public-tls-certs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.760509 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-config-data\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.760530 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.760612 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn8zw\" (UniqueName: \"kubernetes.io/projected/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-kube-api-access-zn8zw\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.760691 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.862726 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn8zw\" (UniqueName: \"kubernetes.io/projected/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-kube-api-access-zn8zw\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") 
" pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.862893 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.863012 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-logs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.863115 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-public-tls-certs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.863162 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-config-data\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.863204 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.863599 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-logs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.868112 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.869566 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-config-data\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.888558 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.888876 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-public-tls-certs\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:08 crc kubenswrapper[4810]: I1203 06:03:08.892114 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn8zw\" (UniqueName: \"kubernetes.io/projected/ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c-kube-api-access-zn8zw\") pod \"nova-api-0\" (UID: \"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c\") " pod="openstack/nova-api-0" Dec 03 06:03:09 crc kubenswrapper[4810]: I1203 06:03:09.064982 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 06:03:09 crc kubenswrapper[4810]: I1203 06:03:09.408461 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"788c2be7-daf0-4cb1-9d7e-0f351e348603","Type":"ContainerStarted","Data":"ded94928fe569cfc67dd83a57fc2332ef046bac842477be862ff7555b34e9c17"} Dec 03 06:03:09 crc kubenswrapper[4810]: I1203 06:03:09.428424 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 06:03:09 crc kubenswrapper[4810]: I1203 06:03:09.436204 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.436186251 podStartE2EDuration="2.436186251s" podCreationTimestamp="2025-12-03 06:03:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:03:09.42929636 +0000 UTC m=+1313.364757211" watchObservedRunningTime="2025-12-03 06:03:09.436186251 +0000 UTC m=+1313.371647102" Dec 03 06:03:09 crc kubenswrapper[4810]: W1203 06:03:09.463524 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae02eee1_9f2a_48c3_8b5e_79e4e6b5bc3c.slice/crio-fd61cfe0fb6d84d9ceef9999105511fd59e1d63cf6c7be2337536c18ccdb647a WatchSource:0}: Error finding container fd61cfe0fb6d84d9ceef9999105511fd59e1d63cf6c7be2337536c18ccdb647a: Status 404 returned error can't find the container with id fd61cfe0fb6d84d9ceef9999105511fd59e1d63cf6c7be2337536c18ccdb647a Dec 03 06:03:10 crc kubenswrapper[4810]: I1203 06:03:10.392269 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb279876-b9b7-4958-8bd0-5a1a2779fa16" path="/var/lib/kubelet/pods/eb279876-b9b7-4958-8bd0-5a1a2779fa16/volumes" Dec 03 06:03:10 crc kubenswrapper[4810]: I1203 06:03:10.449712 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c","Type":"ContainerStarted","Data":"55101cae0c7d5ef15abcbac82cc79b40942d708e834ad9a9ea6116f608fe4ae2"} Dec 03 06:03:10 crc kubenswrapper[4810]: I1203 06:03:10.449830 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c","Type":"ContainerStarted","Data":"6784cf29e4a8bbf8f07512f3cec8752ec7a020b7090a0d5fa0a6bdb21666ed02"} Dec 03 06:03:10 crc kubenswrapper[4810]: I1203 06:03:10.449850 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c","Type":"ContainerStarted","Data":"fd61cfe0fb6d84d9ceef9999105511fd59e1d63cf6c7be2337536c18ccdb647a"} Dec 03 06:03:10 crc kubenswrapper[4810]: I1203 06:03:10.485448 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.485425895 podStartE2EDuration="2.485425895s" podCreationTimestamp="2025-12-03 06:03:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-03 06:03:10.475820232 +0000 UTC m=+1314.411281083" watchObservedRunningTime="2025-12-03 06:03:10.485425895 +0000 UTC m=+1314.420886746" Dec 03 06:03:11 crc kubenswrapper[4810]: I1203 06:03:11.977806 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 06:03:11 crc kubenswrapper[4810]: I1203 06:03:11.978572 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 06:03:12 crc kubenswrapper[4810]: I1203 06:03:12.687883 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 06:03:16 crc kubenswrapper[4810]: I1203 06:03:16.977649 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 06:03:16 crc kubenswrapper[4810]: I1203 06:03:16.978432 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 06:03:17 crc kubenswrapper[4810]: I1203 06:03:17.688020 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 06:03:17 crc kubenswrapper[4810]: I1203 06:03:17.750600 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 06:03:17 crc kubenswrapper[4810]: I1203 06:03:17.993951 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="75e0323d-ca57-4a7e-a883-35da97b7e9d7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 06:03:17 crc kubenswrapper[4810]: I1203 06:03:17.994012 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="75e0323d-ca57-4a7e-a883-35da97b7e9d7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 06:03:18 crc kubenswrapper[4810]: I1203 06:03:18.590702 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 06:03:19 crc kubenswrapper[4810]: I1203 06:03:19.065975 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 06:03:19 crc kubenswrapper[4810]: I1203 06:03:19.066049 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 06:03:20 crc kubenswrapper[4810]: I1203 06:03:20.083908 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 06:03:20 crc kubenswrapper[4810]: I1203 06:03:20.083971 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 06:03:24 crc kubenswrapper[4810]: I1203 06:03:24.471242 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 06:03:25 crc kubenswrapper[4810]: I1203 06:03:25.677563 4810 
patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:03:25 crc kubenswrapper[4810]: I1203 06:03:25.679708 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:03:26 crc kubenswrapper[4810]: I1203 06:03:26.983540 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 06:03:26 crc kubenswrapper[4810]: I1203 06:03:26.989798 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 06:03:26 crc kubenswrapper[4810]: I1203 06:03:26.998692 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 06:03:27 crc kubenswrapper[4810]: I1203 06:03:27.667351 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 06:03:29 crc kubenswrapper[4810]: I1203 06:03:29.080764 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 06:03:29 crc kubenswrapper[4810]: I1203 06:03:29.081638 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 06:03:29 crc kubenswrapper[4810]: I1203 06:03:29.082615 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 06:03:29 crc kubenswrapper[4810]: I1203 06:03:29.095799 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 06:03:29 crc kubenswrapper[4810]: I1203 06:03:29.682828 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 06:03:29 crc kubenswrapper[4810]: I1203 06:03:29.696987 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 06:03:38 crc kubenswrapper[4810]: I1203 06:03:38.440854 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 06:03:39 crc kubenswrapper[4810]: I1203 06:03:39.533545 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 06:03:43 crc kubenswrapper[4810]: I1203 06:03:43.426138 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" containerName="rabbitmq" containerID="cri-o://9323cdf4ddf6943307fa9a3dbccc919bc72405af9b884aa7b014b3c2b6faabef" gracePeriod=604796 Dec 03 06:03:45 crc kubenswrapper[4810]: I1203 06:03:45.009784 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="55cb8ef9-3722-41ab-8655-ccb1508619fd" containerName="rabbitmq" containerID="cri-o://987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951" gracePeriod=604795 Dec 03 06:03:49 crc kubenswrapper[4810]: I1203 06:03:49.971898 4810 generic.go:334] "Generic (PLEG): container finished" podID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" 
containerID="9323cdf4ddf6943307fa9a3dbccc919bc72405af9b884aa7b014b3c2b6faabef" exitCode=0 Dec 03 06:03:49 crc kubenswrapper[4810]: I1203 06:03:49.973853 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"43dbe69c-d6b8-4773-ad88-79c3c975afdf","Type":"ContainerDied","Data":"9323cdf4ddf6943307fa9a3dbccc919bc72405af9b884aa7b014b3c2b6faabef"} Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.257554 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304195 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-config-data\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304271 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/43dbe69c-d6b8-4773-ad88-79c3c975afdf-erlang-cookie-secret\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304305 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-plugins\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304344 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-confd\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304363 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-tls\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304398 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-plugins-conf\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304433 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-server-conf\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304562 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzfvt\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-kube-api-access-fzfvt\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304582 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304773 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-erlang-cookie\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.304833 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/43dbe69c-d6b8-4773-ad88-79c3c975afdf-pod-info\") pod \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\" (UID: \"43dbe69c-d6b8-4773-ad88-79c3c975afdf\") " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.306114 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.314034 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dbe69c-d6b8-4773-ad88-79c3c975afdf-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.314586 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.315048 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.321553 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.334830 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.334981 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-kube-api-access-fzfvt" (OuterVolumeSpecName: "kube-api-access-fzfvt") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "kube-api-access-fzfvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.339042 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/43dbe69c-d6b8-4773-ad88-79c3c975afdf-pod-info" (OuterVolumeSpecName: "pod-info") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.386752 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-config-data" (OuterVolumeSpecName: "config-data") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.406907 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzfvt\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-kube-api-access-fzfvt\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.406954 4810 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.406966 4810 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.406978 4810 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/43dbe69c-d6b8-4773-ad88-79c3c975afdf-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.406988 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.406998 4810 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/43dbe69c-d6b8-4773-ad88-79c3c975afdf-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.407008 4810 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.407024 4810 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc 
kubenswrapper[4810]: I1203 06:03:50.407033 4810 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.433308 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-server-conf" (OuterVolumeSpecName: "server-conf") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.444959 4810 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.497711 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "43dbe69c-d6b8-4773-ad88-79c3c975afdf" (UID: "43dbe69c-d6b8-4773-ad88-79c3c975afdf"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.508585 4810 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/43dbe69c-d6b8-4773-ad88-79c3c975afdf-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.508623 4810 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/43dbe69c-d6b8-4773-ad88-79c3c975afdf-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.508637 4810 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.983747 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"43dbe69c-d6b8-4773-ad88-79c3c975afdf","Type":"ContainerDied","Data":"1b161a3023869e8bc7b1479a9a9aead2ca52c51671bf58dcf61c829d8b4a4d67"} Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.984282 4810 scope.go:117] "RemoveContainer" containerID="9323cdf4ddf6943307fa9a3dbccc919bc72405af9b884aa7b014b3c2b6faabef" Dec 03 06:03:50 crc kubenswrapper[4810]: I1203 06:03:50.984453 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.009597 4810 scope.go:117] "RemoveContainer" containerID="e4949b492e4394fad97a05af5ecd0f3270a715f053d57898e6bca3965a05913d" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.029395 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.040426 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.078227 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 06:03:51 crc kubenswrapper[4810]: E1203 06:03:51.078858 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" containerName="rabbitmq" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.078884 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" containerName="rabbitmq" Dec 03 06:03:51 crc kubenswrapper[4810]: E1203 06:03:51.078911 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" containerName="setup-container" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.078920 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" containerName="setup-container" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.079213 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" containerName="rabbitmq" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.080711 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.082957 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.083123 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.083920 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-k7shm" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.084012 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.084056 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.084160 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.084441 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.094073 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.224881 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.224955 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.224987 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.225008 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snzt7\" (UniqueName: \"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-kube-api-access-snzt7\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.225068 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/743ad4f7-d246-495e-8f32-4ecf10c858bd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.225093 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.225118 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.225152 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.225183 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/743ad4f7-d246-495e-8f32-4ecf10c858bd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.225217 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.225238 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-config-data\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327327 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327390 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/743ad4f7-d246-495e-8f32-4ecf10c858bd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327446 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327471 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-config-data\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc 
kubenswrapper[4810]: I1203 06:03:51.327501 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327527 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327553 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327576 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snzt7\" (UniqueName: \"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-kube-api-access-snzt7\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327635 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/743ad4f7-d246-495e-8f32-4ecf10c858bd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327663 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327886 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.327899 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.328184 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.328544 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.328836 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-config-data\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.329141 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.329547 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/743ad4f7-d246-495e-8f32-4ecf10c858bd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.337927 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/743ad4f7-d246-495e-8f32-4ecf10c858bd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.338407 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/743ad4f7-d246-495e-8f32-4ecf10c858bd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.338460 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.351798 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.355435 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snzt7\" (UniqueName: \"kubernetes.io/projected/743ad4f7-d246-495e-8f32-4ecf10c858bd-kube-api-access-snzt7\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.377293 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"743ad4f7-d246-495e-8f32-4ecf10c858bd\") " pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.400224 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.620360 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.740160 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-plugins-conf\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.740933 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs2kj\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-kube-api-access-rs2kj\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741029 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741059 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/55cb8ef9-3722-41ab-8655-ccb1508619fd-pod-info\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741108 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-plugins\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741126 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-tls\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741153 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-erlang-cookie\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741183 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-confd\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741209 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/55cb8ef9-3722-41ab-8655-ccb1508619fd-erlang-cookie-secret\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741252 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-server-conf\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741275 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-config-data\") pod \"55cb8ef9-3722-41ab-8655-ccb1508619fd\" (UID: \"55cb8ef9-3722-41ab-8655-ccb1508619fd\") " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.741767 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.742614 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.743509 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.757141 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55cb8ef9-3722-41ab-8655-ccb1508619fd-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.757467 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/55cb8ef9-3722-41ab-8655-ccb1508619fd-pod-info" (OuterVolumeSpecName: "pod-info") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.759436 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.760604 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-kube-api-access-rs2kj" (OuterVolumeSpecName: "kube-api-access-rs2kj") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "kube-api-access-rs2kj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.767218 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.797370 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-config-data" (OuterVolumeSpecName: "config-data") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.810951 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-server-conf" (OuterVolumeSpecName: "server-conf") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.845942 4810 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.845981 4810 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/55cb8ef9-3722-41ab-8655-ccb1508619fd-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.845995 4810 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.846005 4810 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.846017 4810 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.846032 4810 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/55cb8ef9-3722-41ab-8655-ccb1508619fd-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.846043 4810 reconciler_common.go:293] "Volume detached for volume 
\"server-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.846056 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.846066 4810 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/55cb8ef9-3722-41ab-8655-ccb1508619fd-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.846075 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs2kj\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-kube-api-access-rs2kj\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.892109 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "55cb8ef9-3722-41ab-8655-ccb1508619fd" (UID: "55cb8ef9-3722-41ab-8655-ccb1508619fd"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.894459 4810 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.947798 4810 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:51 crc kubenswrapper[4810]: I1203 06:03:51.947830 4810 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/55cb8ef9-3722-41ab-8655-ccb1508619fd-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.018811 4810 generic.go:334] "Generic (PLEG): container finished" podID="55cb8ef9-3722-41ab-8655-ccb1508619fd" containerID="987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951" exitCode=0 Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.018862 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"55cb8ef9-3722-41ab-8655-ccb1508619fd","Type":"ContainerDied","Data":"987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951"} Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.018892 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"55cb8ef9-3722-41ab-8655-ccb1508619fd","Type":"ContainerDied","Data":"2eb75d82e6241cfa11ef9bf36cf6b1306b01de7c577637c3adb4b0808f5069e5"} Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.018909 4810 scope.go:117] "RemoveContainer" containerID="987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.018961 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.021892 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 06:03:52 crc kubenswrapper[4810]: W1203 06:03:52.032229 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod743ad4f7_d246_495e_8f32_4ecf10c858bd.slice/crio-ab13b1b86f4ac73045775abb7d5c21446832d4a2c43806b64e8bee0319954720 WatchSource:0}: Error finding container ab13b1b86f4ac73045775abb7d5c21446832d4a2c43806b64e8bee0319954720: Status 404 returned error can't find the container with id ab13b1b86f4ac73045775abb7d5c21446832d4a2c43806b64e8bee0319954720 Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.061438 4810 scope.go:117] "RemoveContainer" containerID="af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.098363 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.142056 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.149753 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 06:03:52 crc kubenswrapper[4810]: E1203 06:03:52.150617 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55cb8ef9-3722-41ab-8655-ccb1508619fd" containerName="rabbitmq" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.150643 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="55cb8ef9-3722-41ab-8655-ccb1508619fd" containerName="rabbitmq" Dec 03 06:03:52 crc kubenswrapper[4810]: E1203 06:03:52.150711 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55cb8ef9-3722-41ab-8655-ccb1508619fd" containerName="setup-container" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.150719 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="55cb8ef9-3722-41ab-8655-ccb1508619fd" containerName="setup-container" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.150993 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="55cb8ef9-3722-41ab-8655-ccb1508619fd" containerName="rabbitmq" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.152428 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.156041 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.156109 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.156511 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.156872 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.157070 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.157258 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.157709 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.157974 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-n5rld" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.171024 4810 scope.go:117] "RemoveContainer" containerID="987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951" Dec 03 06:03:52 crc kubenswrapper[4810]: E1203 06:03:52.172540 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951\": container with ID starting with 987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951 not found: ID does not exist" containerID="987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.172602 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951"} err="failed to get container status \"987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951\": rpc error: code = NotFound desc = could not find container \"987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951\": container with ID starting with 987e3fc665f8511663b42337c73e9f6a5b7c538e0b97dc6de0df1d3a25294951 not found: ID does not exist" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.172638 4810 scope.go:117] "RemoveContainer" containerID="af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb" Dec 03 06:03:52 crc kubenswrapper[4810]: E1203 06:03:52.175534 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb\": container with ID starting with af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb not found: ID does not exist" containerID="af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.175569 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb"} 
err="failed to get container status \"af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb\": rpc error: code = NotFound desc = could not find container \"af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb\": container with ID starting with af81ab93b3b929799698e9b07eb4a7d734fe0cf737b5387482e6e280f79efefb not found: ID does not exist" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.292952 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.293606 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.293701 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b4404434-bf13-4da3-a7df-d5ef032b4b67-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.293873 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.293956 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.294142 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.294235 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkljl\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-kube-api-access-hkljl\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.294309 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc 
kubenswrapper[4810]: I1203 06:03:52.294524 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.294913 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b4404434-bf13-4da3-a7df-d5ef032b4b67-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.295095 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.396841 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b4404434-bf13-4da3-a7df-d5ef032b4b67-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.396925 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.396969 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397078 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397124 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkljl\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-kube-api-access-hkljl\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397159 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397206 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397327 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b4404434-bf13-4da3-a7df-d5ef032b4b67-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397373 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397450 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397502 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.397922 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.398526 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.400515 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43dbe69c-d6b8-4773-ad88-79c3c975afdf" path="/var/lib/kubelet/pods/43dbe69c-d6b8-4773-ad88-79c3c975afdf/volumes" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.401684 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.401815 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55cb8ef9-3722-41ab-8655-ccb1508619fd" path="/var/lib/kubelet/pods/55cb8ef9-3722-41ab-8655-ccb1508619fd/volumes" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.402092 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.402545 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b4404434-bf13-4da3-a7df-d5ef032b4b67-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.402841 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b4404434-bf13-4da3-a7df-d5ef032b4b67-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.403480 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.404136 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.404213 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b4404434-bf13-4da3-a7df-d5ef032b4b67-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.404719 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b4404434-bf13-4da3-a7df-d5ef032b4b67-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.423598 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkljl\" (UniqueName: \"kubernetes.io/projected/b4404434-bf13-4da3-a7df-d5ef032b4b67-kube-api-access-hkljl\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.455853 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b4404434-bf13-4da3-a7df-d5ef032b4b67\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:52 crc kubenswrapper[4810]: I1203 06:03:52.483216 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.043495 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"743ad4f7-d246-495e-8f32-4ecf10c858bd","Type":"ContainerStarted","Data":"ab13b1b86f4ac73045775abb7d5c21446832d4a2c43806b64e8bee0319954720"} Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.095672 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.533771 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d8d97df57-7vfvb"] Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.536663 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.555057 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.562940 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d8d97df57-7vfvb"] Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.627906 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-sb\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.628013 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-nb\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.628065 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-config\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.628090 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-svc\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.628191 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6rkl\" (UniqueName: \"kubernetes.io/projected/69acf595-49b3-4d7f-a87d-70ab114ac467-kube-api-access-t6rkl\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.628258 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-swift-storage-0\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" 
(UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.628307 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-openstack-edpm-ipam\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.730796 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6rkl\" (UniqueName: \"kubernetes.io/projected/69acf595-49b3-4d7f-a87d-70ab114ac467-kube-api-access-t6rkl\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.731326 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-swift-storage-0\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.731396 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-openstack-edpm-ipam\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.731478 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-sb\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.731513 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-nb\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.731542 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-config\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.731564 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-svc\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.732570 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-swift-storage-0\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: 
\"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.732568 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-sb\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.732710 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-config\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.733107 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-nb\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.733443 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-openstack-edpm-ipam\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.733627 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-svc\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.786122 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6rkl\" (UniqueName: \"kubernetes.io/projected/69acf595-49b3-4d7f-a87d-70ab114ac467-kube-api-access-t6rkl\") pod \"dnsmasq-dns-7d8d97df57-7vfvb\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:53 crc kubenswrapper[4810]: I1203 06:03:53.861833 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:54 crc kubenswrapper[4810]: I1203 06:03:54.062349 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b4404434-bf13-4da3-a7df-d5ef032b4b67","Type":"ContainerStarted","Data":"41af15be73192c2583471de0d826c3571985276e1be0783f66521c3ec2f5c3e9"} Dec 03 06:03:54 crc kubenswrapper[4810]: I1203 06:03:54.370309 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d8d97df57-7vfvb"] Dec 03 06:03:54 crc kubenswrapper[4810]: W1203 06:03:54.381403 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69acf595_49b3_4d7f_a87d_70ab114ac467.slice/crio-bf44b70a4e5d3515945760e06940ef72af462bae4c276398104daf22f6181400 WatchSource:0}: Error finding container bf44b70a4e5d3515945760e06940ef72af462bae4c276398104daf22f6181400: Status 404 returned error can't find the container with id bf44b70a4e5d3515945760e06940ef72af462bae4c276398104daf22f6181400 Dec 03 06:03:55 crc kubenswrapper[4810]: I1203 06:03:55.077118 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"743ad4f7-d246-495e-8f32-4ecf10c858bd","Type":"ContainerStarted","Data":"7b8ad98aea7d98724b79fb36f4ca1d373912a8ea70ee7f2454b081f7b5d1a621"} Dec 03 06:03:55 crc kubenswrapper[4810]: I1203 06:03:55.079912 4810 generic.go:334] "Generic (PLEG): container finished" podID="69acf595-49b3-4d7f-a87d-70ab114ac467" containerID="730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80" exitCode=0 Dec 03 06:03:55 crc kubenswrapper[4810]: I1203 06:03:55.079956 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" event={"ID":"69acf595-49b3-4d7f-a87d-70ab114ac467","Type":"ContainerDied","Data":"730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80"} Dec 03 06:03:55 crc kubenswrapper[4810]: I1203 06:03:55.079982 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" event={"ID":"69acf595-49b3-4d7f-a87d-70ab114ac467","Type":"ContainerStarted","Data":"bf44b70a4e5d3515945760e06940ef72af462bae4c276398104daf22f6181400"} Dec 03 06:03:55 crc kubenswrapper[4810]: I1203 06:03:55.678547 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:03:55 crc kubenswrapper[4810]: I1203 06:03:55.678665 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:03:56 crc kubenswrapper[4810]: I1203 06:03:56.099433 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b4404434-bf13-4da3-a7df-d5ef032b4b67","Type":"ContainerStarted","Data":"e07f780d9351c24e6bd766c4c6fbf282cb90e25aef0ef60d9cc35433e5b32517"} Dec 03 06:03:56 crc kubenswrapper[4810]: I1203 06:03:56.102855 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" 
event={"ID":"69acf595-49b3-4d7f-a87d-70ab114ac467","Type":"ContainerStarted","Data":"19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1"} Dec 03 06:03:56 crc kubenswrapper[4810]: I1203 06:03:56.103078 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:03:56 crc kubenswrapper[4810]: I1203 06:03:56.182328 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" podStartSLOduration=3.182282018 podStartE2EDuration="3.182282018s" podCreationTimestamp="2025-12-03 06:03:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:03:56.171518404 +0000 UTC m=+1360.106979275" watchObservedRunningTime="2025-12-03 06:03:56.182282018 +0000 UTC m=+1360.117742899" Dec 03 06:04:03 crc kubenswrapper[4810]: I1203 06:04:03.864005 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:04:03 crc kubenswrapper[4810]: I1203 06:04:03.948208 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79785b5f-skdbm"] Dec 03 06:04:03 crc kubenswrapper[4810]: I1203 06:04:03.948609 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79785b5f-skdbm" podUID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerName="dnsmasq-dns" containerID="cri-o://d71cf6b485e138d3d7e7e1ceb87a65ea1f85faae53fa3ecf910ddad7674c3230" gracePeriod=10 Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.195083 4810 generic.go:334] "Generic (PLEG): container finished" podID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerID="d71cf6b485e138d3d7e7e1ceb87a65ea1f85faae53fa3ecf910ddad7674c3230" exitCode=0 Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.195434 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79785b5f-skdbm" event={"ID":"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180","Type":"ContainerDied","Data":"d71cf6b485e138d3d7e7e1ceb87a65ea1f85faae53fa3ecf910ddad7674c3230"} Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.209715 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-797f4ccc47-h492p"] Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.211376 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.221460 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-797f4ccc47-h492p"] Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.362637 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-ovsdbserver-nb\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.362713 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-config\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.362781 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-ovsdbserver-sb\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.362835 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-dns-svc\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.363065 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2xt8\" (UniqueName: \"kubernetes.io/projected/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-kube-api-access-p2xt8\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.363144 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-openstack-edpm-ipam\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.363246 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-dns-swift-storage-0\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.468119 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-ovsdbserver-sb\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.468224 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-dns-svc\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.468281 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2xt8\" (UniqueName: \"kubernetes.io/projected/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-kube-api-access-p2xt8\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.468311 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-openstack-edpm-ipam\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.468360 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-dns-swift-storage-0\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.468396 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-ovsdbserver-nb\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.468448 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-config\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.469346 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-ovsdbserver-sb\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.469390 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-config\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.469834 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-openstack-edpm-ipam\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.470099 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-ovsdbserver-nb\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.470118 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-dns-svc\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.470547 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-dns-swift-storage-0\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.493327 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2xt8\" (UniqueName: \"kubernetes.io/projected/c2e2cdc0-2bb3-450f-b42d-8bfeee479f46-kube-api-access-p2xt8\") pod \"dnsmasq-dns-797f4ccc47-h492p\" (UID: \"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46\") " pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.538255 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.636231 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.773813 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-config\") pod \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.774226 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-nb\") pod \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.774258 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-sb\") pod \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.774331 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-swift-storage-0\") pod \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.774415 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b88c9\" (UniqueName: \"kubernetes.io/projected/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-kube-api-access-b88c9\") pod \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " Dec 03 
06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.774446 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-svc\") pod \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\" (UID: \"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180\") " Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.783923 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-kube-api-access-b88c9" (OuterVolumeSpecName: "kube-api-access-b88c9") pod "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" (UID: "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180"). InnerVolumeSpecName "kube-api-access-b88c9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.840455 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" (UID: "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.845543 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-config" (OuterVolumeSpecName: "config") pod "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" (UID: "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.845678 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" (UID: "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.850114 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" (UID: "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.858331 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" (UID: "19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.881558 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.881600 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b88c9\" (UniqueName: \"kubernetes.io/projected/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-kube-api-access-b88c9\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.881615 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.881629 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.881642 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:04 crc kubenswrapper[4810]: I1203 06:04:04.881653 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:05 crc kubenswrapper[4810]: I1203 06:04:05.061095 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-797f4ccc47-h492p"] Dec 03 06:04:05 crc kubenswrapper[4810]: W1203 06:04:05.064826 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2e2cdc0_2bb3_450f_b42d_8bfeee479f46.slice/crio-ffc4e5ecd53077552a2acfd20b3f1f68c7e91c72c79b75c1a9e13cfa9b954f77 WatchSource:0}: Error finding container ffc4e5ecd53077552a2acfd20b3f1f68c7e91c72c79b75c1a9e13cfa9b954f77: Status 404 returned error can't find the container with id ffc4e5ecd53077552a2acfd20b3f1f68c7e91c72c79b75c1a9e13cfa9b954f77 Dec 03 06:04:05 crc kubenswrapper[4810]: I1203 06:04:05.213372 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-797f4ccc47-h492p" event={"ID":"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46","Type":"ContainerStarted","Data":"ffc4e5ecd53077552a2acfd20b3f1f68c7e91c72c79b75c1a9e13cfa9b954f77"} Dec 03 06:04:05 crc kubenswrapper[4810]: I1203 06:04:05.216306 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79785b5f-skdbm" event={"ID":"19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180","Type":"ContainerDied","Data":"cfb07630bd51d6d658d49223304d899855bf419981f5dcc73d2e1f51eef235a5"} Dec 03 06:04:05 crc kubenswrapper[4810]: I1203 06:04:05.216377 4810 scope.go:117] "RemoveContainer" containerID="d71cf6b485e138d3d7e7e1ceb87a65ea1f85faae53fa3ecf910ddad7674c3230" Dec 03 06:04:05 crc kubenswrapper[4810]: I1203 06:04:05.216412 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79785b5f-skdbm" Dec 03 06:04:05 crc kubenswrapper[4810]: I1203 06:04:05.259249 4810 scope.go:117] "RemoveContainer" containerID="0d06bdc0a3823df2fe72e6b526e030e90514dd22b59d171bd1ce4223f53c3584" Dec 03 06:04:05 crc kubenswrapper[4810]: I1203 06:04:05.265069 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79785b5f-skdbm"] Dec 03 06:04:05 crc kubenswrapper[4810]: I1203 06:04:05.274164 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79785b5f-skdbm"] Dec 03 06:04:06 crc kubenswrapper[4810]: I1203 06:04:06.243343 4810 generic.go:334] "Generic (PLEG): container finished" podID="c2e2cdc0-2bb3-450f-b42d-8bfeee479f46" containerID="5468e4254bd9ca8336a660fa6fb33fe688dbc42df85b502aaaef3b7060bc4584" exitCode=0 Dec 03 06:04:06 crc kubenswrapper[4810]: I1203 06:04:06.243479 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-797f4ccc47-h492p" event={"ID":"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46","Type":"ContainerDied","Data":"5468e4254bd9ca8336a660fa6fb33fe688dbc42df85b502aaaef3b7060bc4584"} Dec 03 06:04:06 crc kubenswrapper[4810]: I1203 06:04:06.392521 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" path="/var/lib/kubelet/pods/19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180/volumes" Dec 03 06:04:07 crc kubenswrapper[4810]: I1203 06:04:07.257552 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-797f4ccc47-h492p" event={"ID":"c2e2cdc0-2bb3-450f-b42d-8bfeee479f46","Type":"ContainerStarted","Data":"2cead53934deed5b6763a6d66960fcbad7a5c1bdc8e4d1ce892b341eb5d2fa01"} Dec 03 06:04:07 crc kubenswrapper[4810]: I1203 06:04:07.257800 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:07 crc kubenswrapper[4810]: I1203 06:04:07.286978 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-797f4ccc47-h492p" podStartSLOduration=3.286957272 podStartE2EDuration="3.286957272s" podCreationTimestamp="2025-12-03 06:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:04:07.282391282 +0000 UTC m=+1371.217852143" watchObservedRunningTime="2025-12-03 06:04:07.286957272 +0000 UTC m=+1371.222418113" Dec 03 06:04:09 crc kubenswrapper[4810]: I1203 06:04:09.409572 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-79785b5f-skdbm" podUID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.194:5353: i/o timeout" Dec 03 06:04:14 crc kubenswrapper[4810]: I1203 06:04:14.539899 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-797f4ccc47-h492p" Dec 03 06:04:15 crc kubenswrapper[4810]: I1203 06:04:15.214619 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d8d97df57-7vfvb"] Dec 03 06:04:15 crc kubenswrapper[4810]: I1203 06:04:15.230347 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" podUID="69acf595-49b3-4d7f-a87d-70ab114ac467" containerName="dnsmasq-dns" containerID="cri-o://19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1" gracePeriod=10 Dec 03 06:04:15 crc kubenswrapper[4810]: I1203 06:04:15.859852 4810 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.002305 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-nb\") pod \"69acf595-49b3-4d7f-a87d-70ab114ac467\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.002507 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-config\") pod \"69acf595-49b3-4d7f-a87d-70ab114ac467\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.002534 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-swift-storage-0\") pod \"69acf595-49b3-4d7f-a87d-70ab114ac467\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.002693 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-sb\") pod \"69acf595-49b3-4d7f-a87d-70ab114ac467\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.002780 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-svc\") pod \"69acf595-49b3-4d7f-a87d-70ab114ac467\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.002812 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-openstack-edpm-ipam\") pod \"69acf595-49b3-4d7f-a87d-70ab114ac467\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.002886 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6rkl\" (UniqueName: \"kubernetes.io/projected/69acf595-49b3-4d7f-a87d-70ab114ac467-kube-api-access-t6rkl\") pod \"69acf595-49b3-4d7f-a87d-70ab114ac467\" (UID: \"69acf595-49b3-4d7f-a87d-70ab114ac467\") " Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.028847 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69acf595-49b3-4d7f-a87d-70ab114ac467-kube-api-access-t6rkl" (OuterVolumeSpecName: "kube-api-access-t6rkl") pod "69acf595-49b3-4d7f-a87d-70ab114ac467" (UID: "69acf595-49b3-4d7f-a87d-70ab114ac467"). InnerVolumeSpecName "kube-api-access-t6rkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.060260 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "69acf595-49b3-4d7f-a87d-70ab114ac467" (UID: "69acf595-49b3-4d7f-a87d-70ab114ac467"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.066161 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "69acf595-49b3-4d7f-a87d-70ab114ac467" (UID: "69acf595-49b3-4d7f-a87d-70ab114ac467"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.067234 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "69acf595-49b3-4d7f-a87d-70ab114ac467" (UID: "69acf595-49b3-4d7f-a87d-70ab114ac467"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.067686 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "69acf595-49b3-4d7f-a87d-70ab114ac467" (UID: "69acf595-49b3-4d7f-a87d-70ab114ac467"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.073963 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-config" (OuterVolumeSpecName: "config") pod "69acf595-49b3-4d7f-a87d-70ab114ac467" (UID: "69acf595-49b3-4d7f-a87d-70ab114ac467"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.082780 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "69acf595-49b3-4d7f-a87d-70ab114ac467" (UID: "69acf595-49b3-4d7f-a87d-70ab114ac467"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.104926 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.104953 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.104962 4810 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.104973 4810 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.104982 4810 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.104989 4810 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/69acf595-49b3-4d7f-a87d-70ab114ac467-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.104997 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6rkl\" (UniqueName: \"kubernetes.io/projected/69acf595-49b3-4d7f-a87d-70ab114ac467-kube-api-access-t6rkl\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.379687 4810 generic.go:334] "Generic (PLEG): container finished" podID="69acf595-49b3-4d7f-a87d-70ab114ac467" containerID="19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1" exitCode=0 Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.385921 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.398415 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" event={"ID":"69acf595-49b3-4d7f-a87d-70ab114ac467","Type":"ContainerDied","Data":"19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1"} Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.398552 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d8d97df57-7vfvb" event={"ID":"69acf595-49b3-4d7f-a87d-70ab114ac467","Type":"ContainerDied","Data":"bf44b70a4e5d3515945760e06940ef72af462bae4c276398104daf22f6181400"} Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.398622 4810 scope.go:117] "RemoveContainer" containerID="19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.432915 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d8d97df57-7vfvb"] Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.440979 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d8d97df57-7vfvb"] Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.450451 4810 scope.go:117] "RemoveContainer" containerID="730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.475655 4810 scope.go:117] "RemoveContainer" containerID="19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1" Dec 03 06:04:16 crc kubenswrapper[4810]: E1203 06:04:16.476203 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1\": container with ID starting with 19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1 not found: ID does not exist" containerID="19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.476280 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1"} err="failed to get container status \"19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1\": rpc error: code = NotFound desc = could not find container \"19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1\": container with ID starting with 19281f4f5c66975bdadffc5588833f81d2cf0eb2df93ab5ffd405e97b527bcd1 not found: ID does not exist" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.476311 4810 scope.go:117] "RemoveContainer" containerID="730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80" Dec 03 06:04:16 crc kubenswrapper[4810]: E1203 06:04:16.476681 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80\": container with ID starting with 730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80 not found: ID does not exist" containerID="730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80" Dec 03 06:04:16 crc kubenswrapper[4810]: I1203 06:04:16.476777 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80"} err="failed to get container status 
\"730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80\": rpc error: code = NotFound desc = could not find container \"730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80\": container with ID starting with 730238700f4b670ab7cdc293d6719de9cc67496b34930fa7834117db67f68a80 not found: ID does not exist" Dec 03 06:04:18 crc kubenswrapper[4810]: I1203 06:04:18.395418 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69acf595-49b3-4d7f-a87d-70ab114ac467" path="/var/lib/kubelet/pods/69acf595-49b3-4d7f-a87d-70ab114ac467/volumes" Dec 03 06:04:23 crc kubenswrapper[4810]: E1203 06:04:23.309118 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69acf595_49b3_4d7f_a87d_70ab114ac467.slice\": RecentStats: unable to find data in memory cache]" Dec 03 06:04:25 crc kubenswrapper[4810]: I1203 06:04:25.677080 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:04:25 crc kubenswrapper[4810]: I1203 06:04:25.677652 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:04:25 crc kubenswrapper[4810]: I1203 06:04:25.677768 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:04:25 crc kubenswrapper[4810]: I1203 06:04:25.678943 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c2d89f1bb4f8093d6c6c727461ec0b9a6e075b6b1856024e1665f68befa27390"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:04:25 crc kubenswrapper[4810]: I1203 06:04:25.679050 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://c2d89f1bb4f8093d6c6c727461ec0b9a6e075b6b1856024e1665f68befa27390" gracePeriod=600 Dec 03 06:04:26 crc kubenswrapper[4810]: I1203 06:04:26.495664 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="c2d89f1bb4f8093d6c6c727461ec0b9a6e075b6b1856024e1665f68befa27390" exitCode=0 Dec 03 06:04:26 crc kubenswrapper[4810]: I1203 06:04:26.495794 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"c2d89f1bb4f8093d6c6c727461ec0b9a6e075b6b1856024e1665f68befa27390"} Dec 03 06:04:26 crc kubenswrapper[4810]: I1203 06:04:26.496401 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" 
event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299"} Dec 03 06:04:26 crc kubenswrapper[4810]: I1203 06:04:26.496432 4810 scope.go:117] "RemoveContainer" containerID="44c04cb46113a276349e0decdfe2671ab188ec4674c9c0e0a836be88642df5e3" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.512586 4810 generic.go:334] "Generic (PLEG): container finished" podID="b4404434-bf13-4da3-a7df-d5ef032b4b67" containerID="e07f780d9351c24e6bd766c4c6fbf282cb90e25aef0ef60d9cc35433e5b32517" exitCode=0 Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.512661 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b4404434-bf13-4da3-a7df-d5ef032b4b67","Type":"ContainerDied","Data":"e07f780d9351c24e6bd766c4c6fbf282cb90e25aef0ef60d9cc35433e5b32517"} Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.529442 4810 generic.go:334] "Generic (PLEG): container finished" podID="743ad4f7-d246-495e-8f32-4ecf10c858bd" containerID="7b8ad98aea7d98724b79fb36f4ca1d373912a8ea70ee7f2454b081f7b5d1a621" exitCode=0 Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.529511 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"743ad4f7-d246-495e-8f32-4ecf10c858bd","Type":"ContainerDied","Data":"7b8ad98aea7d98724b79fb36f4ca1d373912a8ea70ee7f2454b081f7b5d1a621"} Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.713437 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2"] Dec 03 06:04:27 crc kubenswrapper[4810]: E1203 06:04:27.714237 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerName="init" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.714258 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerName="init" Dec 03 06:04:27 crc kubenswrapper[4810]: E1203 06:04:27.714275 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69acf595-49b3-4d7f-a87d-70ab114ac467" containerName="init" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.714283 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="69acf595-49b3-4d7f-a87d-70ab114ac467" containerName="init" Dec 03 06:04:27 crc kubenswrapper[4810]: E1203 06:04:27.714296 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69acf595-49b3-4d7f-a87d-70ab114ac467" containerName="dnsmasq-dns" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.714304 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="69acf595-49b3-4d7f-a87d-70ab114ac467" containerName="dnsmasq-dns" Dec 03 06:04:27 crc kubenswrapper[4810]: E1203 06:04:27.714333 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerName="dnsmasq-dns" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.714339 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerName="dnsmasq-dns" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.714502 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="19bdbb15-10b3-4eb4-8e7f-ffe5d8fa2180" containerName="dnsmasq-dns" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.714530 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="69acf595-49b3-4d7f-a87d-70ab114ac467" 
containerName="dnsmasq-dns" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.717580 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.761824 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.762095 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.762282 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.764093 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.775045 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2"] Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.890588 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.891036 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.891366 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.891461 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dd2s\" (UniqueName: \"kubernetes.io/projected/64d3f7de-ee70-4197-b7ba-547459e0dfef-kube-api-access-6dd2s\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.993952 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.994078 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6dd2s\" (UniqueName: \"kubernetes.io/projected/64d3f7de-ee70-4197-b7ba-547459e0dfef-kube-api-access-6dd2s\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.994340 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:27 crc kubenswrapper[4810]: I1203 06:04:27.994478 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.003499 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.003587 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.004310 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.016119 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dd2s\" (UniqueName: \"kubernetes.io/projected/64d3f7de-ee70-4197-b7ba-547459e0dfef-kube-api-access-6dd2s\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.151935 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.542963 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"743ad4f7-d246-495e-8f32-4ecf10c858bd","Type":"ContainerStarted","Data":"2378502ef163f79a010290ed0dc0f9a55f9e619399e2d6f282da2017252749df"} Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.543946 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.548000 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b4404434-bf13-4da3-a7df-d5ef032b4b67","Type":"ContainerStarted","Data":"950eea1227e2fb7d9a009cd1f1461a84452606b74477b9d1ebf44ecd1cc1372b"} Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.549330 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.584425 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.58438319 podStartE2EDuration="37.58438319s" podCreationTimestamp="2025-12-03 06:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:04:28.574810798 +0000 UTC m=+1392.510271649" watchObservedRunningTime="2025-12-03 06:04:28.58438319 +0000 UTC m=+1392.519844031" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.610469 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.610438856 podStartE2EDuration="36.610438856s" podCreationTimestamp="2025-12-03 06:03:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:04:28.597213098 +0000 UTC m=+1392.532673949" watchObservedRunningTime="2025-12-03 06:04:28.610438856 +0000 UTC m=+1392.545899697" Dec 03 06:04:28 crc kubenswrapper[4810]: I1203 06:04:28.804874 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2"] Dec 03 06:04:29 crc kubenswrapper[4810]: I1203 06:04:29.562551 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" event={"ID":"64d3f7de-ee70-4197-b7ba-547459e0dfef","Type":"ContainerStarted","Data":"320d40bdf718e4ccca9c11fc2a379eca00cc2cb094d0b2c6d4a62ca89b717f53"} Dec 03 06:04:33 crc kubenswrapper[4810]: E1203 06:04:33.646212 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69acf595_49b3_4d7f_a87d_70ab114ac467.slice\": RecentStats: unable to find data in memory cache]" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.188386 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b5d5d"] Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.197057 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.222108 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b5d5d"] Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.292037 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-utilities\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.292150 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szt7s\" (UniqueName: \"kubernetes.io/projected/81f29f2b-742a-4879-b0ab-0de51455d74a-kube-api-access-szt7s\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.292244 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-catalog-content\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.394952 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-catalog-content\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.395108 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-utilities\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.395149 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szt7s\" (UniqueName: \"kubernetes.io/projected/81f29f2b-742a-4879-b0ab-0de51455d74a-kube-api-access-szt7s\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.395946 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-utilities\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.395970 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-catalog-content\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.418767 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-szt7s\" (UniqueName: \"kubernetes.io/projected/81f29f2b-742a-4879-b0ab-0de51455d74a-kube-api-access-szt7s\") pod \"redhat-operators-b5d5d\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:35 crc kubenswrapper[4810]: I1203 06:04:35.535715 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:39 crc kubenswrapper[4810]: I1203 06:04:39.456803 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b5d5d"] Dec 03 06:04:39 crc kubenswrapper[4810]: I1203 06:04:39.698794 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" event={"ID":"64d3f7de-ee70-4197-b7ba-547459e0dfef","Type":"ContainerStarted","Data":"7e0f3a2b000e206785e450eb8e8e9535360706acfaa24177b7e91c8a062ac216"} Dec 03 06:04:39 crc kubenswrapper[4810]: I1203 06:04:39.704560 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5d5d" event={"ID":"81f29f2b-742a-4879-b0ab-0de51455d74a","Type":"ContainerStarted","Data":"4e630464b3ab311d3bfb7e14b458142367ad48058b7012696762c5904a0fdfdd"} Dec 03 06:04:39 crc kubenswrapper[4810]: I1203 06:04:39.731609 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" podStartSLOduration=2.552899058 podStartE2EDuration="12.731577073s" podCreationTimestamp="2025-12-03 06:04:27 +0000 UTC" firstStartedPulling="2025-12-03 06:04:28.812725061 +0000 UTC m=+1392.748185902" lastFinishedPulling="2025-12-03 06:04:38.991403066 +0000 UTC m=+1402.926863917" observedRunningTime="2025-12-03 06:04:39.725116463 +0000 UTC m=+1403.660577314" watchObservedRunningTime="2025-12-03 06:04:39.731577073 +0000 UTC m=+1403.667037924" Dec 03 06:04:40 crc kubenswrapper[4810]: I1203 06:04:40.716031 4810 generic.go:334] "Generic (PLEG): container finished" podID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerID="72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c" exitCode=0 Dec 03 06:04:40 crc kubenswrapper[4810]: I1203 06:04:40.716126 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5d5d" event={"ID":"81f29f2b-742a-4879-b0ab-0de51455d74a","Type":"ContainerDied","Data":"72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c"} Dec 03 06:04:41 crc kubenswrapper[4810]: I1203 06:04:41.409476 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 06:04:42 crc kubenswrapper[4810]: I1203 06:04:42.488035 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 06:04:43 crc kubenswrapper[4810]: E1203 06:04:43.922766 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69acf595_49b3_4d7f_a87d_70ab114ac467.slice\": RecentStats: unable to find data in memory cache]" Dec 03 06:04:45 crc kubenswrapper[4810]: I1203 06:04:45.796712 4810 generic.go:334] "Generic (PLEG): container finished" podID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerID="9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348" exitCode=0 Dec 03 06:04:45 crc kubenswrapper[4810]: I1203 06:04:45.797124 4810 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-operators-b5d5d" event={"ID":"81f29f2b-742a-4879-b0ab-0de51455d74a","Type":"ContainerDied","Data":"9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348"} Dec 03 06:04:46 crc kubenswrapper[4810]: I1203 06:04:46.813837 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5d5d" event={"ID":"81f29f2b-742a-4879-b0ab-0de51455d74a","Type":"ContainerStarted","Data":"f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1"} Dec 03 06:04:46 crc kubenswrapper[4810]: I1203 06:04:46.843544 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b5d5d" podStartSLOduration=6.277066082 podStartE2EDuration="11.84352188s" podCreationTimestamp="2025-12-03 06:04:35 +0000 UTC" firstStartedPulling="2025-12-03 06:04:40.720125039 +0000 UTC m=+1404.655585910" lastFinishedPulling="2025-12-03 06:04:46.286580857 +0000 UTC m=+1410.222041708" observedRunningTime="2025-12-03 06:04:46.838390335 +0000 UTC m=+1410.773851216" watchObservedRunningTime="2025-12-03 06:04:46.84352188 +0000 UTC m=+1410.778982741" Dec 03 06:04:53 crc kubenswrapper[4810]: I1203 06:04:53.908263 4810 generic.go:334] "Generic (PLEG): container finished" podID="64d3f7de-ee70-4197-b7ba-547459e0dfef" containerID="7e0f3a2b000e206785e450eb8e8e9535360706acfaa24177b7e91c8a062ac216" exitCode=0 Dec 03 06:04:53 crc kubenswrapper[4810]: I1203 06:04:53.908341 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" event={"ID":"64d3f7de-ee70-4197-b7ba-547459e0dfef","Type":"ContainerDied","Data":"7e0f3a2b000e206785e450eb8e8e9535360706acfaa24177b7e91c8a062ac216"} Dec 03 06:04:54 crc kubenswrapper[4810]: E1203 06:04:54.203154 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69acf595_49b3_4d7f_a87d_70ab114ac467.slice\": RecentStats: unable to find data in memory cache]" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.467865 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.535954 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.536045 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.619043 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.646195 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-ssh-key\") pod \"64d3f7de-ee70-4197-b7ba-547459e0dfef\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.646410 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-inventory\") pod \"64d3f7de-ee70-4197-b7ba-547459e0dfef\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.646571 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-repo-setup-combined-ca-bundle\") pod \"64d3f7de-ee70-4197-b7ba-547459e0dfef\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.646739 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dd2s\" (UniqueName: \"kubernetes.io/projected/64d3f7de-ee70-4197-b7ba-547459e0dfef-kube-api-access-6dd2s\") pod \"64d3f7de-ee70-4197-b7ba-547459e0dfef\" (UID: \"64d3f7de-ee70-4197-b7ba-547459e0dfef\") " Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.655830 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64d3f7de-ee70-4197-b7ba-547459e0dfef-kube-api-access-6dd2s" (OuterVolumeSpecName: "kube-api-access-6dd2s") pod "64d3f7de-ee70-4197-b7ba-547459e0dfef" (UID: "64d3f7de-ee70-4197-b7ba-547459e0dfef"). InnerVolumeSpecName "kube-api-access-6dd2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.658187 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "64d3f7de-ee70-4197-b7ba-547459e0dfef" (UID: "64d3f7de-ee70-4197-b7ba-547459e0dfef"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.683351 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-inventory" (OuterVolumeSpecName: "inventory") pod "64d3f7de-ee70-4197-b7ba-547459e0dfef" (UID: "64d3f7de-ee70-4197-b7ba-547459e0dfef"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.688243 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "64d3f7de-ee70-4197-b7ba-547459e0dfef" (UID: "64d3f7de-ee70-4197-b7ba-547459e0dfef"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.750667 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.750717 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.750754 4810 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d3f7de-ee70-4197-b7ba-547459e0dfef-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.750777 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dd2s\" (UniqueName: \"kubernetes.io/projected/64d3f7de-ee70-4197-b7ba-547459e0dfef-kube-api-access-6dd2s\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.939407 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" event={"ID":"64d3f7de-ee70-4197-b7ba-547459e0dfef","Type":"ContainerDied","Data":"320d40bdf718e4ccca9c11fc2a379eca00cc2cb094d0b2c6d4a62ca89b717f53"} Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.939519 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="320d40bdf718e4ccca9c11fc2a379eca00cc2cb094d0b2c6d4a62ca89b717f53" Dec 03 06:04:55 crc kubenswrapper[4810]: I1203 06:04:55.939535 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.021344 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.087236 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b5d5d"] Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.134977 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh"] Dec 03 06:04:56 crc kubenswrapper[4810]: E1203 06:04:56.135571 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d3f7de-ee70-4197-b7ba-547459e0dfef" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.135596 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d3f7de-ee70-4197-b7ba-547459e0dfef" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.135961 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d3f7de-ee70-4197-b7ba-547459e0dfef" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.137092 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.140819 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.141928 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.142046 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.145061 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.161261 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh"] Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.162302 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.162553 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.162649 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4drrn\" (UniqueName: 
\"kubernetes.io/projected/f7a1032e-664c-477e-93be-b363dce922bb-kube-api-access-4drrn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.264370 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4drrn\" (UniqueName: \"kubernetes.io/projected/f7a1032e-664c-477e-93be-b363dce922bb-kube-api-access-4drrn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.264421 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.264540 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.270041 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.270628 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.292517 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4drrn\" (UniqueName: \"kubernetes.io/projected/f7a1032e-664c-477e-93be-b363dce922bb-kube-api-access-4drrn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-45tnh\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:56 crc kubenswrapper[4810]: I1203 06:04:56.490834 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:04:57 crc kubenswrapper[4810]: I1203 06:04:57.108539 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh"] Dec 03 06:04:57 crc kubenswrapper[4810]: W1203 06:04:57.123354 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7a1032e_664c_477e_93be_b363dce922bb.slice/crio-ee0cfa83d1e73b94903c1f2782ebef3fee3192eeb74bffc3bc6c54b50437d011 WatchSource:0}: Error finding container ee0cfa83d1e73b94903c1f2782ebef3fee3192eeb74bffc3bc6c54b50437d011: Status 404 returned error can't find the container with id ee0cfa83d1e73b94903c1f2782ebef3fee3192eeb74bffc3bc6c54b50437d011 Dec 03 06:04:57 crc kubenswrapper[4810]: I1203 06:04:57.973872 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" event={"ID":"f7a1032e-664c-477e-93be-b363dce922bb","Type":"ContainerStarted","Data":"ee0cfa83d1e73b94903c1f2782ebef3fee3192eeb74bffc3bc6c54b50437d011"} Dec 03 06:04:57 crc kubenswrapper[4810]: I1203 06:04:57.974243 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b5d5d" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerName="registry-server" containerID="cri-o://f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1" gracePeriod=2 Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.489523 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.524528 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-catalog-content\") pod \"81f29f2b-742a-4879-b0ab-0de51455d74a\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.524610 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szt7s\" (UniqueName: \"kubernetes.io/projected/81f29f2b-742a-4879-b0ab-0de51455d74a-kube-api-access-szt7s\") pod \"81f29f2b-742a-4879-b0ab-0de51455d74a\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.524653 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-utilities\") pod \"81f29f2b-742a-4879-b0ab-0de51455d74a\" (UID: \"81f29f2b-742a-4879-b0ab-0de51455d74a\") " Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.525851 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-utilities" (OuterVolumeSpecName: "utilities") pod "81f29f2b-742a-4879-b0ab-0de51455d74a" (UID: "81f29f2b-742a-4879-b0ab-0de51455d74a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.562016 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81f29f2b-742a-4879-b0ab-0de51455d74a-kube-api-access-szt7s" (OuterVolumeSpecName: "kube-api-access-szt7s") pod "81f29f2b-742a-4879-b0ab-0de51455d74a" (UID: "81f29f2b-742a-4879-b0ab-0de51455d74a"). InnerVolumeSpecName "kube-api-access-szt7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.627788 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szt7s\" (UniqueName: \"kubernetes.io/projected/81f29f2b-742a-4879-b0ab-0de51455d74a-kube-api-access-szt7s\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.628115 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.677556 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81f29f2b-742a-4879-b0ab-0de51455d74a" (UID: "81f29f2b-742a-4879-b0ab-0de51455d74a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.730894 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f29f2b-742a-4879-b0ab-0de51455d74a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.992357 4810 generic.go:334] "Generic (PLEG): container finished" podID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerID="f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1" exitCode=0 Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.992469 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5d5d" event={"ID":"81f29f2b-742a-4879-b0ab-0de51455d74a","Type":"ContainerDied","Data":"f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1"} Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.992502 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b5d5d" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.992562 4810 scope.go:117] "RemoveContainer" containerID="f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1" Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.992547 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5d5d" event={"ID":"81f29f2b-742a-4879-b0ab-0de51455d74a","Type":"ContainerDied","Data":"4e630464b3ab311d3bfb7e14b458142367ad48058b7012696762c5904a0fdfdd"} Dec 03 06:04:58 crc kubenswrapper[4810]: I1203 06:04:58.996309 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" event={"ID":"f7a1032e-664c-477e-93be-b363dce922bb","Type":"ContainerStarted","Data":"f53ec88e5b47798295493b3bc65136124f4013ab6c78c8838773cdb380e75e27"} Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.040682 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" podStartSLOduration=2.146155424 podStartE2EDuration="3.040535451s" podCreationTimestamp="2025-12-03 06:04:56 +0000 UTC" firstStartedPulling="2025-12-03 06:04:57.128706867 +0000 UTC m=+1421.064167718" lastFinishedPulling="2025-12-03 06:04:58.023086904 +0000 UTC m=+1421.958547745" observedRunningTime="2025-12-03 06:04:59.020136054 +0000 UTC m=+1422.955596965" watchObservedRunningTime="2025-12-03 06:04:59.040535451 +0000 UTC m=+1422.975996352" Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.057709 4810 scope.go:117] "RemoveContainer" containerID="9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348" Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.086608 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b5d5d"] Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.105050 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b5d5d"] Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.112569 4810 scope.go:117] "RemoveContainer" containerID="72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c" Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.150008 4810 scope.go:117] "RemoveContainer" containerID="f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1" Dec 03 06:04:59 crc kubenswrapper[4810]: E1203 06:04:59.150607 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1\": container with ID starting with f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1 not found: ID does not exist" containerID="f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1" Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.150693 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1"} err="failed to get container status \"f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1\": rpc error: code = NotFound desc = could not find container \"f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1\": container with ID starting with f51ff8b06a20a3e214df87b7fa4a7de10421b8774dcb5483fb8f8571c03e99e1 not found: ID does not exist" Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.150785 
4810 scope.go:117] "RemoveContainer" containerID="9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348" Dec 03 06:04:59 crc kubenswrapper[4810]: E1203 06:04:59.154173 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348\": container with ID starting with 9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348 not found: ID does not exist" containerID="9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348" Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.154248 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348"} err="failed to get container status \"9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348\": rpc error: code = NotFound desc = could not find container \"9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348\": container with ID starting with 9e8e34b1970f4d5aff69be241785866c5cb7272a0271c9d4e4bdc607957c8348 not found: ID does not exist" Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.154297 4810 scope.go:117] "RemoveContainer" containerID="72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c" Dec 03 06:04:59 crc kubenswrapper[4810]: E1203 06:04:59.155169 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c\": container with ID starting with 72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c not found: ID does not exist" containerID="72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c" Dec 03 06:04:59 crc kubenswrapper[4810]: I1203 06:04:59.155225 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c"} err="failed to get container status \"72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c\": rpc error: code = NotFound desc = could not find container \"72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c\": container with ID starting with 72c781f62c9f81e0e65af8e59bb6714482eea6e81d8744cf92544374cda5e21c not found: ID does not exist" Dec 03 06:05:00 crc kubenswrapper[4810]: I1203 06:05:00.403506 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" path="/var/lib/kubelet/pods/81f29f2b-742a-4879-b0ab-0de51455d74a/volumes" Dec 03 06:05:02 crc kubenswrapper[4810]: I1203 06:05:02.057230 4810 generic.go:334] "Generic (PLEG): container finished" podID="f7a1032e-664c-477e-93be-b363dce922bb" containerID="f53ec88e5b47798295493b3bc65136124f4013ab6c78c8838773cdb380e75e27" exitCode=0 Dec 03 06:05:02 crc kubenswrapper[4810]: I1203 06:05:02.057342 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" event={"ID":"f7a1032e-664c-477e-93be-b363dce922bb","Type":"ContainerDied","Data":"f53ec88e5b47798295493b3bc65136124f4013ab6c78c8838773cdb380e75e27"} Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.706536 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.885825 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4drrn\" (UniqueName: \"kubernetes.io/projected/f7a1032e-664c-477e-93be-b363dce922bb-kube-api-access-4drrn\") pod \"f7a1032e-664c-477e-93be-b363dce922bb\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.886004 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-inventory\") pod \"f7a1032e-664c-477e-93be-b363dce922bb\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.886253 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-ssh-key\") pod \"f7a1032e-664c-477e-93be-b363dce922bb\" (UID: \"f7a1032e-664c-477e-93be-b363dce922bb\") " Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.907179 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a1032e-664c-477e-93be-b363dce922bb-kube-api-access-4drrn" (OuterVolumeSpecName: "kube-api-access-4drrn") pod "f7a1032e-664c-477e-93be-b363dce922bb" (UID: "f7a1032e-664c-477e-93be-b363dce922bb"). InnerVolumeSpecName "kube-api-access-4drrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.927343 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-inventory" (OuterVolumeSpecName: "inventory") pod "f7a1032e-664c-477e-93be-b363dce922bb" (UID: "f7a1032e-664c-477e-93be-b363dce922bb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.933540 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f7a1032e-664c-477e-93be-b363dce922bb" (UID: "f7a1032e-664c-477e-93be-b363dce922bb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.989798 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4drrn\" (UniqueName: \"kubernetes.io/projected/f7a1032e-664c-477e-93be-b363dce922bb-kube-api-access-4drrn\") on node \"crc\" DevicePath \"\"" Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.989840 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:05:03 crc kubenswrapper[4810]: I1203 06:05:03.989856 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7a1032e-664c-477e-93be-b363dce922bb-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.090282 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" event={"ID":"f7a1032e-664c-477e-93be-b363dce922bb","Type":"ContainerDied","Data":"ee0cfa83d1e73b94903c1f2782ebef3fee3192eeb74bffc3bc6c54b50437d011"} Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.090333 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee0cfa83d1e73b94903c1f2782ebef3fee3192eeb74bffc3bc6c54b50437d011" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.090416 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-45tnh" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.227522 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j"] Dec 03 06:05:04 crc kubenswrapper[4810]: E1203 06:05:04.236499 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerName="extract-content" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.236543 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerName="extract-content" Dec 03 06:05:04 crc kubenswrapper[4810]: E1203 06:05:04.236580 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerName="registry-server" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.236593 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerName="registry-server" Dec 03 06:05:04 crc kubenswrapper[4810]: E1203 06:05:04.236672 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerName="extract-utilities" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.236685 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerName="extract-utilities" Dec 03 06:05:04 crc kubenswrapper[4810]: E1203 06:05:04.236713 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a1032e-664c-477e-93be-b363dce922bb" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.236730 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a1032e-664c-477e-93be-b363dce922bb" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.237246 4810 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="f7a1032e-664c-477e-93be-b363dce922bb" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.237319 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="81f29f2b-742a-4879-b0ab-0de51455d74a" containerName="registry-server" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.238614 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.240398 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j"] Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.241800 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.242003 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.242040 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.244639 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.300210 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k628x\" (UniqueName: \"kubernetes.io/projected/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-kube-api-access-k628x\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.300400 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.300717 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.300975 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.404040 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" 
(UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.406110 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.406454 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k628x\" (UniqueName: \"kubernetes.io/projected/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-kube-api-access-k628x\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.406792 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.411621 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.412432 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.416512 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.426018 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k628x\" (UniqueName: \"kubernetes.io/projected/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-kube-api-access-k628x\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f489j\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: E1203 06:05:04.521986 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69acf595_49b3_4d7f_a87d_70ab114ac467.slice\": RecentStats: unable to find data in memory cache]" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 
06:05:04.613662 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:05:04 crc kubenswrapper[4810]: I1203 06:05:04.998714 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j"] Dec 03 06:05:05 crc kubenswrapper[4810]: I1203 06:05:05.104072 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" event={"ID":"4dc1bd47-9cbd-4849-b466-bf72ec92cf14","Type":"ContainerStarted","Data":"ca1b6c12492c9dc03006ed9e3ee51698ec15e8247084eec361dd3ae7cb08dfc1"} Dec 03 06:05:07 crc kubenswrapper[4810]: I1203 06:05:07.132336 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" event={"ID":"4dc1bd47-9cbd-4849-b466-bf72ec92cf14","Type":"ContainerStarted","Data":"4904715fdaf7052a103a48a646b689284c648f84041b914877d3039efcf87115"} Dec 03 06:05:07 crc kubenswrapper[4810]: I1203 06:05:07.196864 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" podStartSLOduration=2.292374948 podStartE2EDuration="3.196828747s" podCreationTimestamp="2025-12-03 06:05:04 +0000 UTC" firstStartedPulling="2025-12-03 06:05:05.004852966 +0000 UTC m=+1428.940313817" lastFinishedPulling="2025-12-03 06:05:05.909306775 +0000 UTC m=+1429.844767616" observedRunningTime="2025-12-03 06:05:07.163273194 +0000 UTC m=+1431.098734045" watchObservedRunningTime="2025-12-03 06:05:07.196828747 +0000 UTC m=+1431.132289598" Dec 03 06:05:14 crc kubenswrapper[4810]: E1203 06:05:14.793909 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69acf595_49b3_4d7f_a87d_70ab114ac467.slice\": RecentStats: unable to find data in memory cache]" Dec 03 06:05:26 crc kubenswrapper[4810]: I1203 06:05:26.253046 4810 scope.go:117] "RemoveContainer" containerID="0cf4e796041a12e4801958c73c1a93695154b39b2b775d5ab887ad209515f7f9" Dec 03 06:05:26 crc kubenswrapper[4810]: I1203 06:05:26.295542 4810 scope.go:117] "RemoveContainer" containerID="26135b25a697df12edb2fac87680236617396581660551e65868d5ae03578c46" Dec 03 06:06:26 crc kubenswrapper[4810]: I1203 06:06:26.399117 4810 scope.go:117] "RemoveContainer" containerID="41e10527d7ee5a5563cb77cc9e193a6aa676322a67c55a735abed2d4b8d56fcb" Dec 03 06:06:44 crc kubenswrapper[4810]: I1203 06:06:44.866691 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j26rc"] Dec 03 06:06:44 crc kubenswrapper[4810]: I1203 06:06:44.870359 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j26rc"] Dec 03 06:06:44 crc kubenswrapper[4810]: I1203 06:06:44.870491 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:44 crc kubenswrapper[4810]: I1203 06:06:44.950386 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgqtb\" (UniqueName: \"kubernetes.io/projected/3f93ae73-abff-4e0c-a1ff-56152c004e9d-kube-api-access-jgqtb\") pod \"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:44 crc kubenswrapper[4810]: I1203 06:06:44.950879 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-catalog-content\") pod \"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:44 crc kubenswrapper[4810]: I1203 06:06:44.950971 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-utilities\") pod \"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:45 crc kubenswrapper[4810]: I1203 06:06:45.053631 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgqtb\" (UniqueName: \"kubernetes.io/projected/3f93ae73-abff-4e0c-a1ff-56152c004e9d-kube-api-access-jgqtb\") pod \"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:45 crc kubenswrapper[4810]: I1203 06:06:45.054249 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-catalog-content\") pod \"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:45 crc kubenswrapper[4810]: I1203 06:06:45.054425 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-utilities\") pod \"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:45 crc kubenswrapper[4810]: I1203 06:06:45.055169 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-utilities\") pod \"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:45 crc kubenswrapper[4810]: I1203 06:06:45.055637 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-catalog-content\") pod \"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:45 crc kubenswrapper[4810]: I1203 06:06:45.078681 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgqtb\" (UniqueName: \"kubernetes.io/projected/3f93ae73-abff-4e0c-a1ff-56152c004e9d-kube-api-access-jgqtb\") pod 
\"certified-operators-j26rc\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:45 crc kubenswrapper[4810]: I1203 06:06:45.205900 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:45 crc kubenswrapper[4810]: I1203 06:06:45.751352 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j26rc"] Dec 03 06:06:46 crc kubenswrapper[4810]: I1203 06:06:46.514394 4810 generic.go:334] "Generic (PLEG): container finished" podID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerID="eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e" exitCode=0 Dec 03 06:06:46 crc kubenswrapper[4810]: I1203 06:06:46.514649 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j26rc" event={"ID":"3f93ae73-abff-4e0c-a1ff-56152c004e9d","Type":"ContainerDied","Data":"eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e"} Dec 03 06:06:46 crc kubenswrapper[4810]: I1203 06:06:46.514895 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j26rc" event={"ID":"3f93ae73-abff-4e0c-a1ff-56152c004e9d","Type":"ContainerStarted","Data":"54ea61e7c91bb4b71989c7b8852abcc505b60022b8fe811a236e45165be5f80e"} Dec 03 06:06:48 crc kubenswrapper[4810]: I1203 06:06:48.553160 4810 generic.go:334] "Generic (PLEG): container finished" podID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerID="92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20" exitCode=0 Dec 03 06:06:48 crc kubenswrapper[4810]: I1203 06:06:48.553299 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j26rc" event={"ID":"3f93ae73-abff-4e0c-a1ff-56152c004e9d","Type":"ContainerDied","Data":"92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20"} Dec 03 06:06:49 crc kubenswrapper[4810]: I1203 06:06:49.571258 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j26rc" event={"ID":"3f93ae73-abff-4e0c-a1ff-56152c004e9d","Type":"ContainerStarted","Data":"80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93"} Dec 03 06:06:49 crc kubenswrapper[4810]: I1203 06:06:49.613459 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j26rc" podStartSLOduration=3.065608403 podStartE2EDuration="5.61342127s" podCreationTimestamp="2025-12-03 06:06:44 +0000 UTC" firstStartedPulling="2025-12-03 06:06:46.519444606 +0000 UTC m=+1530.454905457" lastFinishedPulling="2025-12-03 06:06:49.067257453 +0000 UTC m=+1533.002718324" observedRunningTime="2025-12-03 06:06:49.598012155 +0000 UTC m=+1533.533472996" watchObservedRunningTime="2025-12-03 06:06:49.61342127 +0000 UTC m=+1533.548882151" Dec 03 06:06:55 crc kubenswrapper[4810]: I1203 06:06:55.206942 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:55 crc kubenswrapper[4810]: I1203 06:06:55.208110 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:55 crc kubenswrapper[4810]: I1203 06:06:55.300438 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:55 crc kubenswrapper[4810]: I1203 
06:06:55.678335 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:06:55 crc kubenswrapper[4810]: I1203 06:06:55.678471 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:06:55 crc kubenswrapper[4810]: I1203 06:06:55.752111 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:55 crc kubenswrapper[4810]: I1203 06:06:55.835589 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j26rc"] Dec 03 06:06:57 crc kubenswrapper[4810]: I1203 06:06:57.685515 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-j26rc" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerName="registry-server" containerID="cri-o://80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93" gracePeriod=2 Dec 03 06:06:58 crc kubenswrapper[4810]: E1203 06:06:58.002575 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f93ae73_abff_4e0c_a1ff_56152c004e9d.slice/crio-conmon-80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93.scope\": RecentStats: unable to find data in memory cache]" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.220045 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.419924 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-catalog-content\") pod \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.420426 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-utilities\") pod \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.420591 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgqtb\" (UniqueName: \"kubernetes.io/projected/3f93ae73-abff-4e0c-a1ff-56152c004e9d-kube-api-access-jgqtb\") pod \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\" (UID: \"3f93ae73-abff-4e0c-a1ff-56152c004e9d\") " Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.422174 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-utilities" (OuterVolumeSpecName: "utilities") pod "3f93ae73-abff-4e0c-a1ff-56152c004e9d" (UID: "3f93ae73-abff-4e0c-a1ff-56152c004e9d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.428806 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f93ae73-abff-4e0c-a1ff-56152c004e9d-kube-api-access-jgqtb" (OuterVolumeSpecName: "kube-api-access-jgqtb") pod "3f93ae73-abff-4e0c-a1ff-56152c004e9d" (UID: "3f93ae73-abff-4e0c-a1ff-56152c004e9d"). InnerVolumeSpecName "kube-api-access-jgqtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.488216 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f93ae73-abff-4e0c-a1ff-56152c004e9d" (UID: "3f93ae73-abff-4e0c-a1ff-56152c004e9d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.529189 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.529266 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f93ae73-abff-4e0c-a1ff-56152c004e9d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.529300 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgqtb\" (UniqueName: \"kubernetes.io/projected/3f93ae73-abff-4e0c-a1ff-56152c004e9d-kube-api-access-jgqtb\") on node \"crc\" DevicePath \"\"" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.700764 4810 generic.go:334] "Generic (PLEG): container finished" podID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerID="80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93" exitCode=0 Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.700829 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j26rc" event={"ID":"3f93ae73-abff-4e0c-a1ff-56152c004e9d","Type":"ContainerDied","Data":"80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93"} Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.700871 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j26rc" event={"ID":"3f93ae73-abff-4e0c-a1ff-56152c004e9d","Type":"ContainerDied","Data":"54ea61e7c91bb4b71989c7b8852abcc505b60022b8fe811a236e45165be5f80e"} Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.700875 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j26rc" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.700900 4810 scope.go:117] "RemoveContainer" containerID="80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.751019 4810 scope.go:117] "RemoveContainer" containerID="92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.752985 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j26rc"] Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.767293 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j26rc"] Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.787423 4810 scope.go:117] "RemoveContainer" containerID="eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.847956 4810 scope.go:117] "RemoveContainer" containerID="80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93" Dec 03 06:06:58 crc kubenswrapper[4810]: E1203 06:06:58.848449 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93\": container with ID starting with 80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93 not found: ID does not exist" containerID="80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.848490 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93"} err="failed to get container status \"80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93\": rpc error: code = NotFound desc = could not find container \"80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93\": container with ID starting with 80999b5ef9c400f902485c829d400d3ac317d981afe8ffe7efdb1c5e3e3ada93 not found: ID does not exist" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.848541 4810 scope.go:117] "RemoveContainer" containerID="92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20" Dec 03 06:06:58 crc kubenswrapper[4810]: E1203 06:06:58.849095 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20\": container with ID starting with 92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20 not found: ID does not exist" containerID="92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.849128 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20"} err="failed to get container status \"92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20\": rpc error: code = NotFound desc = could not find container \"92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20\": container with ID starting with 92619c9f473dc6af35f3009bae419710811b2aee7fe04173332ff47de9028b20 not found: ID does not exist" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.849148 4810 scope.go:117] "RemoveContainer" 
containerID="eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e" Dec 03 06:06:58 crc kubenswrapper[4810]: E1203 06:06:58.849654 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e\": container with ID starting with eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e not found: ID does not exist" containerID="eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e" Dec 03 06:06:58 crc kubenswrapper[4810]: I1203 06:06:58.849797 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e"} err="failed to get container status \"eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e\": rpc error: code = NotFound desc = could not find container \"eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e\": container with ID starting with eef09f02cd82a76bfdbb8b32499e8e05cc7cb402be7e8d1e531f04896d94749e not found: ID does not exist" Dec 03 06:07:00 crc kubenswrapper[4810]: I1203 06:07:00.402308 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" path="/var/lib/kubelet/pods/3f93ae73-abff-4e0c-a1ff-56152c004e9d/volumes" Dec 03 06:07:25 crc kubenswrapper[4810]: I1203 06:07:25.677634 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:07:25 crc kubenswrapper[4810]: I1203 06:07:25.678406 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:07:26 crc kubenswrapper[4810]: I1203 06:07:26.592611 4810 scope.go:117] "RemoveContainer" containerID="6834d032e8428dcc5ea67c540e14c487a263bc7e9102facbf436ab14be87d36b" Dec 03 06:07:26 crc kubenswrapper[4810]: I1203 06:07:26.629042 4810 scope.go:117] "RemoveContainer" containerID="dbcbfee7463650f67ef1f74a2f1be71afdc5dbd9d179e0693c73eecf49694baa" Dec 03 06:07:55 crc kubenswrapper[4810]: I1203 06:07:55.677193 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:07:55 crc kubenswrapper[4810]: I1203 06:07:55.678202 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:07:55 crc kubenswrapper[4810]: I1203 06:07:55.678301 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:07:55 crc kubenswrapper[4810]: I1203 06:07:55.679446 4810 kuberuntime_manager.go:1027] "Message 
for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:07:55 crc kubenswrapper[4810]: I1203 06:07:55.679526 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" gracePeriod=600 Dec 03 06:07:55 crc kubenswrapper[4810]: E1203 06:07:55.815819 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:07:56 crc kubenswrapper[4810]: I1203 06:07:56.486263 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" exitCode=0 Dec 03 06:07:56 crc kubenswrapper[4810]: I1203 06:07:56.486323 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299"} Dec 03 06:07:56 crc kubenswrapper[4810]: I1203 06:07:56.486374 4810 scope.go:117] "RemoveContainer" containerID="c2d89f1bb4f8093d6c6c727461ec0b9a6e075b6b1856024e1665f68befa27390" Dec 03 06:07:56 crc kubenswrapper[4810]: I1203 06:07:56.487460 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:07:56 crc kubenswrapper[4810]: E1203 06:07:56.487848 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:08:07 crc kubenswrapper[4810]: I1203 06:08:07.377855 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:08:07 crc kubenswrapper[4810]: E1203 06:08:07.379065 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:08:21 crc kubenswrapper[4810]: I1203 06:08:21.377814 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:08:21 crc kubenswrapper[4810]: E1203 
06:08:21.380565 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:08:31 crc kubenswrapper[4810]: I1203 06:08:31.973573 4810 generic.go:334] "Generic (PLEG): container finished" podID="4dc1bd47-9cbd-4849-b466-bf72ec92cf14" containerID="4904715fdaf7052a103a48a646b689284c648f84041b914877d3039efcf87115" exitCode=0 Dec 03 06:08:31 crc kubenswrapper[4810]: I1203 06:08:31.974301 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" event={"ID":"4dc1bd47-9cbd-4849-b466-bf72ec92cf14","Type":"ContainerDied","Data":"4904715fdaf7052a103a48a646b689284c648f84041b914877d3039efcf87115"} Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.568271 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.684241 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-inventory\") pod \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.684354 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-bootstrap-combined-ca-bundle\") pod \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.684575 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-ssh-key\") pod \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.684976 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k628x\" (UniqueName: \"kubernetes.io/projected/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-kube-api-access-k628x\") pod \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\" (UID: \"4dc1bd47-9cbd-4849-b466-bf72ec92cf14\") " Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.695659 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "4dc1bd47-9cbd-4849-b466-bf72ec92cf14" (UID: "4dc1bd47-9cbd-4849-b466-bf72ec92cf14"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.696620 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-kube-api-access-k628x" (OuterVolumeSpecName: "kube-api-access-k628x") pod "4dc1bd47-9cbd-4849-b466-bf72ec92cf14" (UID: "4dc1bd47-9cbd-4849-b466-bf72ec92cf14"). 
InnerVolumeSpecName "kube-api-access-k628x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.737948 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4dc1bd47-9cbd-4849-b466-bf72ec92cf14" (UID: "4dc1bd47-9cbd-4849-b466-bf72ec92cf14"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.740144 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-inventory" (OuterVolumeSpecName: "inventory") pod "4dc1bd47-9cbd-4849-b466-bf72ec92cf14" (UID: "4dc1bd47-9cbd-4849-b466-bf72ec92cf14"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.788111 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k628x\" (UniqueName: \"kubernetes.io/projected/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-kube-api-access-k628x\") on node \"crc\" DevicePath \"\"" Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.788349 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.788466 4810 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:08:33 crc kubenswrapper[4810]: I1203 06:08:33.788601 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dc1bd47-9cbd-4849-b466-bf72ec92cf14-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.012899 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" event={"ID":"4dc1bd47-9cbd-4849-b466-bf72ec92cf14","Type":"ContainerDied","Data":"ca1b6c12492c9dc03006ed9e3ee51698ec15e8247084eec361dd3ae7cb08dfc1"} Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.012960 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f489j" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.012974 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca1b6c12492c9dc03006ed9e3ee51698ec15e8247084eec361dd3ae7cb08dfc1" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.171151 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf"] Dec 03 06:08:34 crc kubenswrapper[4810]: E1203 06:08:34.171688 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc1bd47-9cbd-4849-b466-bf72ec92cf14" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.171717 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc1bd47-9cbd-4849-b466-bf72ec92cf14" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 06:08:34 crc kubenswrapper[4810]: E1203 06:08:34.171766 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerName="extract-utilities" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.171777 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerName="extract-utilities" Dec 03 06:08:34 crc kubenswrapper[4810]: E1203 06:08:34.171805 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerName="extract-content" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.171815 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerName="extract-content" Dec 03 06:08:34 crc kubenswrapper[4810]: E1203 06:08:34.171830 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerName="registry-server" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.171839 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerName="registry-server" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.172084 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f93ae73-abff-4e0c-a1ff-56152c004e9d" containerName="registry-server" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.172111 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc1bd47-9cbd-4849-b466-bf72ec92cf14" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.173141 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.178503 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.178533 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.183156 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.184059 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.194840 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf"] Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.320610 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.321222 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f86ts\" (UniqueName: \"kubernetes.io/projected/15aa89be-fc4f-4965-99f0-3eb7bce02b10-kube-api-access-f86ts\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.321644 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.424434 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.424940 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.425071 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f86ts\" (UniqueName: \"kubernetes.io/projected/15aa89be-fc4f-4965-99f0-3eb7bce02b10-kube-api-access-f86ts\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.432977 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.439022 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.456469 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f86ts\" (UniqueName: \"kubernetes.io/projected/15aa89be-fc4f-4965-99f0-3eb7bce02b10-kube-api-access-f86ts\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:34 crc kubenswrapper[4810]: I1203 06:08:34.515572 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:08:35 crc kubenswrapper[4810]: I1203 06:08:35.200595 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf"] Dec 03 06:08:35 crc kubenswrapper[4810]: I1203 06:08:35.207531 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:08:35 crc kubenswrapper[4810]: I1203 06:08:35.378451 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:08:35 crc kubenswrapper[4810]: E1203 06:08:35.378953 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:08:36 crc kubenswrapper[4810]: I1203 06:08:36.043136 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" event={"ID":"15aa89be-fc4f-4965-99f0-3eb7bce02b10","Type":"ContainerStarted","Data":"031084dfb746e73e6d2e57351ce475f89712edf7619b6b7508f2327a99d77fd8"} Dec 03 06:08:37 crc kubenswrapper[4810]: I1203 06:08:37.079234 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" event={"ID":"15aa89be-fc4f-4965-99f0-3eb7bce02b10","Type":"ContainerStarted","Data":"92a5ab51534c23c9703705fea184d0cd8b6c0d57b6d80b2a68b0f1b96421dc1d"} Dec 03 06:08:37 crc kubenswrapper[4810]: I1203 06:08:37.116613 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" podStartSLOduration=2.374054391 podStartE2EDuration="3.116568817s" podCreationTimestamp="2025-12-03 06:08:34 +0000 UTC" firstStartedPulling="2025-12-03 06:08:35.206789224 +0000 UTC m=+1639.142250105" lastFinishedPulling="2025-12-03 06:08:35.94930368 +0000 UTC m=+1639.884764531" observedRunningTime="2025-12-03 06:08:37.106128132 +0000 UTC m=+1641.041589013" watchObservedRunningTime="2025-12-03 06:08:37.116568817 +0000 UTC m=+1641.052029688" Dec 03 06:08:50 crc kubenswrapper[4810]: I1203 06:08:50.378103 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:08:50 crc kubenswrapper[4810]: E1203 06:08:50.380597 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:08:59 crc kubenswrapper[4810]: I1203 06:08:59.074096 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-f5r7f"] Dec 03 06:08:59 crc kubenswrapper[4810]: I1203 06:08:59.087872 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-1d40-account-create-update-88mwj"] Dec 03 06:08:59 crc kubenswrapper[4810]: I1203 06:08:59.099809 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-qbv8v"] Dec 03 06:08:59 crc kubenswrapper[4810]: I1203 06:08:59.110216 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-f5r7f"] Dec 03 06:08:59 crc kubenswrapper[4810]: I1203 06:08:59.118163 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-1d40-account-create-update-88mwj"] Dec 03 06:08:59 crc kubenswrapper[4810]: I1203 06:08:59.125619 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-qbv8v"] Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.040772 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-fa4a-account-create-update-rv5r5"] Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.054631 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-576sj"] Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.068621 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-5e23-account-create-update-nkzsd"] Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.078652 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-576sj"] Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.085991 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-fa4a-account-create-update-rv5r5"] Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.093707 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-5e23-account-create-update-nkzsd"] Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.399084 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1344a6a7-87f7-4e67-abfd-16d32643169b" path="/var/lib/kubelet/pods/1344a6a7-87f7-4e67-abfd-16d32643169b/volumes" Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.400389 4810 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="1a713ace-ef43-4698-9691-e961d0cf1b48" path="/var/lib/kubelet/pods/1a713ace-ef43-4698-9691-e961d0cf1b48/volumes" Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.401564 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5796a4ae-d33d-479d-89d3-a23c76988ffa" path="/var/lib/kubelet/pods/5796a4ae-d33d-479d-89d3-a23c76988ffa/volumes" Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.402684 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7582561b-f4c4-4695-8173-2dc6a168d119" path="/var/lib/kubelet/pods/7582561b-f4c4-4695-8173-2dc6a168d119/volumes" Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.405482 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92911999-e22a-4a27-ab41-8fe265136906" path="/var/lib/kubelet/pods/92911999-e22a-4a27-ab41-8fe265136906/volumes" Dec 03 06:09:00 crc kubenswrapper[4810]: I1203 06:09:00.406679 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee6fdc90-8159-4a54-8758-74a9fdcd83f7" path="/var/lib/kubelet/pods/ee6fdc90-8159-4a54-8758-74a9fdcd83f7/volumes" Dec 03 06:09:02 crc kubenswrapper[4810]: I1203 06:09:02.378670 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:09:02 crc kubenswrapper[4810]: E1203 06:09:02.379844 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:09:17 crc kubenswrapper[4810]: I1203 06:09:17.378570 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:09:17 crc kubenswrapper[4810]: E1203 06:09:17.380341 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.411930 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m26cn"] Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.419219 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.426377 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m26cn"] Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.554022 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-utilities\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.554760 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzs7r\" (UniqueName: \"kubernetes.io/projected/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-kube-api-access-fzs7r\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.554881 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-catalog-content\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.657155 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzs7r\" (UniqueName: \"kubernetes.io/projected/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-kube-api-access-fzs7r\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.657259 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-catalog-content\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.657350 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-utilities\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.657949 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-utilities\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.657966 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-catalog-content\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.683573 4810 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fzs7r\" (UniqueName: \"kubernetes.io/projected/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-kube-api-access-fzs7r\") pod \"redhat-marketplace-m26cn\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:22 crc kubenswrapper[4810]: I1203 06:09:22.756192 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:23 crc kubenswrapper[4810]: I1203 06:09:23.327758 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m26cn"] Dec 03 06:09:24 crc kubenswrapper[4810]: I1203 06:09:24.017437 4810 generic.go:334] "Generic (PLEG): container finished" podID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerID="5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c" exitCode=0 Dec 03 06:09:24 crc kubenswrapper[4810]: I1203 06:09:24.017576 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m26cn" event={"ID":"4f530959-8a3a-4baa-86ea-80ec3cbe9d31","Type":"ContainerDied","Data":"5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c"} Dec 03 06:09:24 crc kubenswrapper[4810]: I1203 06:09:24.019148 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m26cn" event={"ID":"4f530959-8a3a-4baa-86ea-80ec3cbe9d31","Type":"ContainerStarted","Data":"6f88befcfc7e05a5950ec0893e454dff69a9b577ca08d9fab56cc7ead8edce99"} Dec 03 06:09:25 crc kubenswrapper[4810]: I1203 06:09:25.034090 4810 generic.go:334] "Generic (PLEG): container finished" podID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerID="e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4" exitCode=0 Dec 03 06:09:25 crc kubenswrapper[4810]: I1203 06:09:25.034239 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m26cn" event={"ID":"4f530959-8a3a-4baa-86ea-80ec3cbe9d31","Type":"ContainerDied","Data":"e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4"} Dec 03 06:09:26 crc kubenswrapper[4810]: I1203 06:09:26.050607 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m26cn" event={"ID":"4f530959-8a3a-4baa-86ea-80ec3cbe9d31","Type":"ContainerStarted","Data":"9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e"} Dec 03 06:09:26 crc kubenswrapper[4810]: I1203 06:09:26.119044 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m26cn" podStartSLOduration=2.737419635 podStartE2EDuration="4.119008234s" podCreationTimestamp="2025-12-03 06:09:22 +0000 UTC" firstStartedPulling="2025-12-03 06:09:24.02155712 +0000 UTC m=+1687.957017971" lastFinishedPulling="2025-12-03 06:09:25.403145689 +0000 UTC m=+1689.338606570" observedRunningTime="2025-12-03 06:09:26.088033779 +0000 UTC m=+1690.023494660" watchObservedRunningTime="2025-12-03 06:09:26.119008234 +0000 UTC m=+1690.054469085" Dec 03 06:09:26 crc kubenswrapper[4810]: I1203 06:09:26.733982 4810 scope.go:117] "RemoveContainer" containerID="ead0de28b374408e2839b099760e73945919ffc25dd29b2bdec3b43697d03f6d" Dec 03 06:09:26 crc kubenswrapper[4810]: I1203 06:09:26.768321 4810 scope.go:117] "RemoveContainer" containerID="30f05921d643db8f8496f5f79f09818910aedf90f3eaf057164509c70b8ac31d" Dec 03 06:09:26 crc kubenswrapper[4810]: I1203 06:09:26.814900 4810 scope.go:117] "RemoveContainer" 
containerID="1e3e75cecd55f1f40db1ab1f4e8a8491d6d21c0e6d027671d830acf24a4d5b4f" Dec 03 06:09:26 crc kubenswrapper[4810]: I1203 06:09:26.882836 4810 scope.go:117] "RemoveContainer" containerID="6c19beacf8ffa1a80ffeb23c82e1b2849c01c91c6d1f14f24e9823ceba8ac4a5" Dec 03 06:09:26 crc kubenswrapper[4810]: I1203 06:09:26.950580 4810 scope.go:117] "RemoveContainer" containerID="7bf074001fa81733ac1008f1545da347c8d746b94dd3f2e3b231f5ebfa756a12" Dec 03 06:09:27 crc kubenswrapper[4810]: I1203 06:09:27.004575 4810 scope.go:117] "RemoveContainer" containerID="ddd9e693285d5135ceccf43ad4406d38f2ca56fba08ca7bc3a7bc0e805349e6d" Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.066683 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-900e-account-create-update-rvcrl"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.095008 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-9e2e-account-create-update-gzghr"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.106178 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-p57j4"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.118615 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-xzmmt"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.131229 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-hl4nn"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.142919 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-2d53-account-create-update-49k7c"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.154791 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-900e-account-create-update-rvcrl"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.165032 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-hl4nn"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.172350 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-p57j4"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.179049 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-9e2e-account-create-update-gzghr"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.186175 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-xzmmt"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.194873 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-2d53-account-create-update-49k7c"] Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.396082 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08cc50ca-8fbc-48c0-8a9d-11baf452f56b" path="/var/lib/kubelet/pods/08cc50ca-8fbc-48c0-8a9d-11baf452f56b/volumes" Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.397295 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="091611a7-14c1-42e6-9f3b-1984d9cb31b3" path="/var/lib/kubelet/pods/091611a7-14c1-42e6-9f3b-1984d9cb31b3/volumes" Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.398711 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20db688f-f2bb-4ca4-b5b3-638a3f9a2e57" path="/var/lib/kubelet/pods/20db688f-f2bb-4ca4-b5b3-638a3f9a2e57/volumes" Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.400284 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="964efc7e-8dae-494a-903a-7208635ff931" 
path="/var/lib/kubelet/pods/964efc7e-8dae-494a-903a-7208635ff931/volumes" Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.402509 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d24a4433-214f-4641-a1c4-ec8a35f420ed" path="/var/lib/kubelet/pods/d24a4433-214f-4641-a1c4-ec8a35f420ed/volumes" Dec 03 06:09:28 crc kubenswrapper[4810]: I1203 06:09:28.403707 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7a03dec-82f1-44d6-b7b2-700cc7df3cac" path="/var/lib/kubelet/pods/f7a03dec-82f1-44d6-b7b2-700cc7df3cac/volumes" Dec 03 06:09:31 crc kubenswrapper[4810]: I1203 06:09:31.378414 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:09:31 crc kubenswrapper[4810]: E1203 06:09:31.380298 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:09:32 crc kubenswrapper[4810]: I1203 06:09:32.756953 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:32 crc kubenswrapper[4810]: I1203 06:09:32.757582 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:32 crc kubenswrapper[4810]: I1203 06:09:32.839370 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:33 crc kubenswrapper[4810]: I1203 06:09:33.259898 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:33 crc kubenswrapper[4810]: I1203 06:09:33.378531 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m26cn"] Dec 03 06:09:35 crc kubenswrapper[4810]: I1203 06:09:35.189970 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m26cn" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerName="registry-server" containerID="cri-o://9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e" gracePeriod=2 Dec 03 06:09:35 crc kubenswrapper[4810]: I1203 06:09:35.787484 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:35 crc kubenswrapper[4810]: I1203 06:09:35.906234 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzs7r\" (UniqueName: \"kubernetes.io/projected/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-kube-api-access-fzs7r\") pod \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " Dec 03 06:09:35 crc kubenswrapper[4810]: I1203 06:09:35.906370 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-catalog-content\") pod \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " Dec 03 06:09:35 crc kubenswrapper[4810]: I1203 06:09:35.906673 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-utilities\") pod \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\" (UID: \"4f530959-8a3a-4baa-86ea-80ec3cbe9d31\") " Dec 03 06:09:35 crc kubenswrapper[4810]: I1203 06:09:35.908889 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-utilities" (OuterVolumeSpecName: "utilities") pod "4f530959-8a3a-4baa-86ea-80ec3cbe9d31" (UID: "4f530959-8a3a-4baa-86ea-80ec3cbe9d31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:09:35 crc kubenswrapper[4810]: I1203 06:09:35.925690 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-kube-api-access-fzs7r" (OuterVolumeSpecName: "kube-api-access-fzs7r") pod "4f530959-8a3a-4baa-86ea-80ec3cbe9d31" (UID: "4f530959-8a3a-4baa-86ea-80ec3cbe9d31"). InnerVolumeSpecName "kube-api-access-fzs7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:09:35 crc kubenswrapper[4810]: I1203 06:09:35.940198 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f530959-8a3a-4baa-86ea-80ec3cbe9d31" (UID: "4f530959-8a3a-4baa-86ea-80ec3cbe9d31"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.010606 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzs7r\" (UniqueName: \"kubernetes.io/projected/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-kube-api-access-fzs7r\") on node \"crc\" DevicePath \"\"" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.011004 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.011139 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f530959-8a3a-4baa-86ea-80ec3cbe9d31-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.209336 4810 generic.go:334] "Generic (PLEG): container finished" podID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerID="9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e" exitCode=0 Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.209428 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m26cn" event={"ID":"4f530959-8a3a-4baa-86ea-80ec3cbe9d31","Type":"ContainerDied","Data":"9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e"} Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.209475 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m26cn" event={"ID":"4f530959-8a3a-4baa-86ea-80ec3cbe9d31","Type":"ContainerDied","Data":"6f88befcfc7e05a5950ec0893e454dff69a9b577ca08d9fab56cc7ead8edce99"} Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.209511 4810 scope.go:117] "RemoveContainer" containerID="9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.209788 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m26cn" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.243860 4810 scope.go:117] "RemoveContainer" containerID="e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.278776 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m26cn"] Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.287807 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m26cn"] Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.302515 4810 scope.go:117] "RemoveContainer" containerID="5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.381521 4810 scope.go:117] "RemoveContainer" containerID="9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e" Dec 03 06:09:36 crc kubenswrapper[4810]: E1203 06:09:36.382563 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e\": container with ID starting with 9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e not found: ID does not exist" containerID="9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.382654 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e"} err="failed to get container status \"9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e\": rpc error: code = NotFound desc = could not find container \"9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e\": container with ID starting with 9c6d7b937f2c3af72d11e6e21432e63274ee935d0c7386f5baf0e4db3a89251e not found: ID does not exist" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.382694 4810 scope.go:117] "RemoveContainer" containerID="e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4" Dec 03 06:09:36 crc kubenswrapper[4810]: E1203 06:09:36.383321 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4\": container with ID starting with e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4 not found: ID does not exist" containerID="e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.383409 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4"} err="failed to get container status \"e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4\": rpc error: code = NotFound desc = could not find container \"e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4\": container with ID starting with e93d5818662ca0ce6bccf93d927d3340f51023c0c23d3fbc60bbe47e34e5b5b4 not found: ID does not exist" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.383478 4810 scope.go:117] "RemoveContainer" containerID="5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c" Dec 03 06:09:36 crc kubenswrapper[4810]: E1203 06:09:36.384268 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c\": container with ID starting with 5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c not found: ID does not exist" containerID="5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.384319 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c"} err="failed to get container status \"5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c\": rpc error: code = NotFound desc = could not find container \"5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c\": container with ID starting with 5426c0b68fd40fa4c85a5bcff095e4b16e4c3ce2a6af1cf575ceb809cffe370c not found: ID does not exist" Dec 03 06:09:36 crc kubenswrapper[4810]: I1203 06:09:36.400233 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" path="/var/lib/kubelet/pods/4f530959-8a3a-4baa-86ea-80ec3cbe9d31/volumes" Dec 03 06:09:38 crc kubenswrapper[4810]: I1203 06:09:38.046165 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-87m89"] Dec 03 06:09:38 crc kubenswrapper[4810]: I1203 06:09:38.064570 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-87m89"] Dec 03 06:09:38 crc kubenswrapper[4810]: I1203 06:09:38.395695 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f10c1fe-10b9-49e1-ae18-5bd77dda7932" path="/var/lib/kubelet/pods/9f10c1fe-10b9-49e1-ae18-5bd77dda7932/volumes" Dec 03 06:09:46 crc kubenswrapper[4810]: I1203 06:09:46.395016 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:09:46 crc kubenswrapper[4810]: E1203 06:09:46.396679 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:10:01 crc kubenswrapper[4810]: I1203 06:10:01.377674 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:10:01 crc kubenswrapper[4810]: E1203 06:10:01.379178 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:10:06 crc kubenswrapper[4810]: I1203 06:10:06.074182 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-xwrlk"] Dec 03 06:10:06 crc kubenswrapper[4810]: I1203 06:10:06.093310 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-xwrlk"] Dec 03 06:10:06 crc kubenswrapper[4810]: I1203 06:10:06.394800 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="dd194c59-21ca-4b1f-b269-a2844d332781" path="/var/lib/kubelet/pods/dd194c59-21ca-4b1f-b269-a2844d332781/volumes" Dec 03 06:10:11 crc kubenswrapper[4810]: I1203 06:10:11.032901 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-sg2m8"] Dec 03 06:10:11 crc kubenswrapper[4810]: I1203 06:10:11.040393 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-sg2m8"] Dec 03 06:10:12 crc kubenswrapper[4810]: I1203 06:10:12.399950 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83ad186f-b9b2-43c4-8b88-8c6df56cd132" path="/var/lib/kubelet/pods/83ad186f-b9b2-43c4-8b88-8c6df56cd132/volumes" Dec 03 06:10:14 crc kubenswrapper[4810]: I1203 06:10:14.056160 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-hvg5q"] Dec 03 06:10:14 crc kubenswrapper[4810]: I1203 06:10:14.085567 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-hvg5q"] Dec 03 06:10:14 crc kubenswrapper[4810]: I1203 06:10:14.424578 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2b44955-f7f1-4819-b948-e82272f18a2b" path="/var/lib/kubelet/pods/d2b44955-f7f1-4819-b948-e82272f18a2b/volumes" Dec 03 06:10:15 crc kubenswrapper[4810]: I1203 06:10:15.031123 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-mmgx4"] Dec 03 06:10:15 crc kubenswrapper[4810]: I1203 06:10:15.041870 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-mmgx4"] Dec 03 06:10:15 crc kubenswrapper[4810]: I1203 06:10:15.379457 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:10:15 crc kubenswrapper[4810]: E1203 06:10:15.379954 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:10:16 crc kubenswrapper[4810]: I1203 06:10:16.044976 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-nqvx4"] Dec 03 06:10:16 crc kubenswrapper[4810]: I1203 06:10:16.065311 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-nqvx4"] Dec 03 06:10:16 crc kubenswrapper[4810]: I1203 06:10:16.401373 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33fa5884-f2c0-4391-a719-81c4d43605dc" path="/var/lib/kubelet/pods/33fa5884-f2c0-4391-a719-81c4d43605dc/volumes" Dec 03 06:10:16 crc kubenswrapper[4810]: I1203 06:10:16.403137 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f374320-accd-46d6-a286-3f2bac0a4180" path="/var/lib/kubelet/pods/4f374320-accd-46d6-a286-3f2bac0a4180/volumes" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.189994 4810 scope.go:117] "RemoveContainer" containerID="9d9a18de7c81871d5c433d54075dfe9ecdec8cf7a4b22b8fa5e4eb54d3bb4a65" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.229093 4810 scope.go:117] "RemoveContainer" containerID="9d215bf807965bd20646a7af7e21f9929cc210cc14d8e5cc34002f2d8a0485ee" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.346710 4810 scope.go:117] "RemoveContainer" 
containerID="d0483bcf594721134f7ddfd975526d1b9874fe8a364e5c08c8755bac096eb292" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.422823 4810 scope.go:117] "RemoveContainer" containerID="2f8b0a1f681ad6855266a27fe17d32b9a83bb08ab6edc4c9801af6211f538a5f" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.462652 4810 scope.go:117] "RemoveContainer" containerID="83f723c58e8ca4a6b565e340bec7fc076201d79050429253fcd11b5fd05b6440" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.507722 4810 scope.go:117] "RemoveContainer" containerID="b7b0251c52336419515a4fd46c4ce2ffe9cda305a485333a3cd4e9bff263a99b" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.571889 4810 scope.go:117] "RemoveContainer" containerID="e7c6f1e0c4789979ea78445cd91ba458541652def5e88bb86fd95a66a4ae21b0" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.605808 4810 scope.go:117] "RemoveContainer" containerID="9e9485aeee115ea4f2390600c5b9dcca925e30ae7f6616f8cc5a137643dba197" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.634227 4810 scope.go:117] "RemoveContainer" containerID="e5bb8678fcd9ddf1536907101eba0d146d71c6e9170dda6dbf817287dee6ad43" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.694805 4810 scope.go:117] "RemoveContainer" containerID="92ee5f26cb111ef92be899386e950ff0fb2f077896ee97e05a274d74bd87401e" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.765003 4810 scope.go:117] "RemoveContainer" containerID="e0cbbe960b81988c6280994b51906994971db03494dd0a0a5692e7eb2e4b780c" Dec 03 06:10:27 crc kubenswrapper[4810]: I1203 06:10:27.830720 4810 scope.go:117] "RemoveContainer" containerID="28df5dc97eb2771b9c89c2c2dd7df831bcd227e840198762d64285bed96490e8" Dec 03 06:10:28 crc kubenswrapper[4810]: I1203 06:10:28.378247 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:10:28 crc kubenswrapper[4810]: E1203 06:10:28.378713 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:10:34 crc kubenswrapper[4810]: I1203 06:10:34.056408 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-z4zwh"] Dec 03 06:10:34 crc kubenswrapper[4810]: I1203 06:10:34.071122 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-z4zwh"] Dec 03 06:10:34 crc kubenswrapper[4810]: I1203 06:10:34.394929 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36ce16c9-937b-4bdb-b5b1-09003d013c3a" path="/var/lib/kubelet/pods/36ce16c9-937b-4bdb-b5b1-09003d013c3a/volumes" Dec 03 06:10:43 crc kubenswrapper[4810]: I1203 06:10:43.379035 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:10:43 crc kubenswrapper[4810]: E1203 06:10:43.380327 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" 
podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:10:51 crc kubenswrapper[4810]: I1203 06:10:51.228153 4810 generic.go:334] "Generic (PLEG): container finished" podID="15aa89be-fc4f-4965-99f0-3eb7bce02b10" containerID="92a5ab51534c23c9703705fea184d0cd8b6c0d57b6d80b2a68b0f1b96421dc1d" exitCode=0 Dec 03 06:10:51 crc kubenswrapper[4810]: I1203 06:10:51.228283 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" event={"ID":"15aa89be-fc4f-4965-99f0-3eb7bce02b10","Type":"ContainerDied","Data":"92a5ab51534c23c9703705fea184d0cd8b6c0d57b6d80b2a68b0f1b96421dc1d"} Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.685825 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.879661 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-inventory\") pod \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.879759 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f86ts\" (UniqueName: \"kubernetes.io/projected/15aa89be-fc4f-4965-99f0-3eb7bce02b10-kube-api-access-f86ts\") pod \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.880130 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-ssh-key\") pod \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\" (UID: \"15aa89be-fc4f-4965-99f0-3eb7bce02b10\") " Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.891144 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15aa89be-fc4f-4965-99f0-3eb7bce02b10-kube-api-access-f86ts" (OuterVolumeSpecName: "kube-api-access-f86ts") pod "15aa89be-fc4f-4965-99f0-3eb7bce02b10" (UID: "15aa89be-fc4f-4965-99f0-3eb7bce02b10"). InnerVolumeSpecName "kube-api-access-f86ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.926378 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "15aa89be-fc4f-4965-99f0-3eb7bce02b10" (UID: "15aa89be-fc4f-4965-99f0-3eb7bce02b10"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.933410 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-inventory" (OuterVolumeSpecName: "inventory") pod "15aa89be-fc4f-4965-99f0-3eb7bce02b10" (UID: "15aa89be-fc4f-4965-99f0-3eb7bce02b10"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.983751 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.983797 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15aa89be-fc4f-4965-99f0-3eb7bce02b10-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:10:52 crc kubenswrapper[4810]: I1203 06:10:52.983814 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f86ts\" (UniqueName: \"kubernetes.io/projected/15aa89be-fc4f-4965-99f0-3eb7bce02b10-kube-api-access-f86ts\") on node \"crc\" DevicePath \"\"" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.258412 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" event={"ID":"15aa89be-fc4f-4965-99f0-3eb7bce02b10","Type":"ContainerDied","Data":"031084dfb746e73e6d2e57351ce475f89712edf7619b6b7508f2327a99d77fd8"} Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.259152 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="031084dfb746e73e6d2e57351ce475f89712edf7619b6b7508f2327a99d77fd8" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.258635 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.369955 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k"] Dec 03 06:10:53 crc kubenswrapper[4810]: E1203 06:10:53.370418 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerName="registry-server" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.370443 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerName="registry-server" Dec 03 06:10:53 crc kubenswrapper[4810]: E1203 06:10:53.370458 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerName="extract-content" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.370467 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerName="extract-content" Dec 03 06:10:53 crc kubenswrapper[4810]: E1203 06:10:53.370487 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15aa89be-fc4f-4965-99f0-3eb7bce02b10" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.370497 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="15aa89be-fc4f-4965-99f0-3eb7bce02b10" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 06:10:53 crc kubenswrapper[4810]: E1203 06:10:53.370541 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerName="extract-utilities" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.370549 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerName="extract-utilities" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.370796 4810 
memory_manager.go:354] "RemoveStaleState removing state" podUID="15aa89be-fc4f-4965-99f0-3eb7bce02b10" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.370825 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f530959-8a3a-4baa-86ea-80ec3cbe9d31" containerName="registry-server" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.371654 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.374679 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.376301 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.381956 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.382521 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.396795 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrdbx\" (UniqueName: \"kubernetes.io/projected/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-kube-api-access-qrdbx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.396888 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.397002 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.399239 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k"] Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.498989 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrdbx\" (UniqueName: \"kubernetes.io/projected/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-kube-api-access-qrdbx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.499087 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.499219 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.504644 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.505280 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.520757 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrdbx\" (UniqueName: \"kubernetes.io/projected/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-kube-api-access-qrdbx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:53 crc kubenswrapper[4810]: I1203 06:10:53.696785 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:10:54 crc kubenswrapper[4810]: I1203 06:10:54.458946 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k"] Dec 03 06:10:55 crc kubenswrapper[4810]: I1203 06:10:55.283829 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" event={"ID":"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f","Type":"ContainerStarted","Data":"96bb951b39c5a5a78c18a88df96f112c4ebb423880be5d38e2ee03b8ac99a5f2"} Dec 03 06:10:55 crc kubenswrapper[4810]: I1203 06:10:55.286279 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" event={"ID":"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f","Type":"ContainerStarted","Data":"641ea77c20dcd49f8a18851aef6ace45d33c16fb7426c5788d58a6f360bd0099"} Dec 03 06:10:55 crc kubenswrapper[4810]: I1203 06:10:55.314526 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" podStartSLOduration=1.8706193519999998 podStartE2EDuration="2.314505054s" podCreationTimestamp="2025-12-03 06:10:53 +0000 UTC" firstStartedPulling="2025-12-03 06:10:54.467326057 +0000 UTC m=+1778.402786898" lastFinishedPulling="2025-12-03 06:10:54.911211759 +0000 UTC m=+1778.846672600" observedRunningTime="2025-12-03 06:10:55.310828768 +0000 UTC m=+1779.246289609" watchObservedRunningTime="2025-12-03 06:10:55.314505054 +0000 UTC m=+1779.249965925" Dec 03 06:10:58 crc kubenswrapper[4810]: I1203 06:10:58.378825 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:10:58 crc kubenswrapper[4810]: E1203 06:10:58.379681 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:11:11 crc kubenswrapper[4810]: I1203 06:11:11.377523 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:11:11 crc kubenswrapper[4810]: E1203 06:11:11.381363 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 06:11:16.071382 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-54f5-account-create-update-79mzp"] Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 06:11:16.085407 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-c965t"] Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 06:11:16.096463 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-dx9c7"] Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 
06:11:16.105786 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-c965t"] Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 06:11:16.114625 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-dx9c7"] Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 06:11:16.122999 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-54f5-account-create-update-79mzp"] Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 06:11:16.413017 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="266d33c8-6cee-4e1c-aec4-e2f291b185e8" path="/var/lib/kubelet/pods/266d33c8-6cee-4e1c-aec4-e2f291b185e8/volumes" Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 06:11:16.414526 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52d9a37a-b91f-455c-9f2a-c0b60832ff78" path="/var/lib/kubelet/pods/52d9a37a-b91f-455c-9f2a-c0b60832ff78/volumes" Dec 03 06:11:16 crc kubenswrapper[4810]: I1203 06:11:16.416024 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea042a9c-a4d6-4a64-9954-7a5b9f197ea3" path="/var/lib/kubelet/pods/ea042a9c-a4d6-4a64-9954-7a5b9f197ea3/volumes" Dec 03 06:11:17 crc kubenswrapper[4810]: I1203 06:11:17.057586 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-b0f1-account-create-update-kjdzz"] Dec 03 06:11:17 crc kubenswrapper[4810]: I1203 06:11:17.067077 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-5hwv5"] Dec 03 06:11:17 crc kubenswrapper[4810]: I1203 06:11:17.076114 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-b0f1-account-create-update-kjdzz"] Dec 03 06:11:17 crc kubenswrapper[4810]: I1203 06:11:17.088615 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-5hwv5"] Dec 03 06:11:18 crc kubenswrapper[4810]: I1203 06:11:18.056958 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-57b4-account-create-update-qbvs6"] Dec 03 06:11:18 crc kubenswrapper[4810]: I1203 06:11:18.072846 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-57b4-account-create-update-qbvs6"] Dec 03 06:11:18 crc kubenswrapper[4810]: I1203 06:11:18.399856 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e220d06-666c-4087-8cb7-89a996c55933" path="/var/lib/kubelet/pods/7e220d06-666c-4087-8cb7-89a996c55933/volumes" Dec 03 06:11:18 crc kubenswrapper[4810]: I1203 06:11:18.401603 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="826ba06a-b58a-4700-a8ff-f6512e825a23" path="/var/lib/kubelet/pods/826ba06a-b58a-4700-a8ff-f6512e825a23/volumes" Dec 03 06:11:18 crc kubenswrapper[4810]: I1203 06:11:18.403046 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2092357-d70b-4bc0-ac9e-3660aff8d920" path="/var/lib/kubelet/pods/f2092357-d70b-4bc0-ac9e-3660aff8d920/volumes" Dec 03 06:11:24 crc kubenswrapper[4810]: I1203 06:11:24.377672 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:11:24 crc kubenswrapper[4810]: E1203 06:11:24.378659 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:11:28 crc kubenswrapper[4810]: I1203 06:11:28.185143 4810 scope.go:117] "RemoveContainer" containerID="fbcafc073b219f82b56af7642471f0f85154534e6a4817246b107b0036b05efc" Dec 03 06:11:28 crc kubenswrapper[4810]: I1203 06:11:28.230218 4810 scope.go:117] "RemoveContainer" containerID="faf1e5fdae941d52e0f432a48746ab6949f34b8a0329cf14d0f000796529f290" Dec 03 06:11:28 crc kubenswrapper[4810]: I1203 06:11:28.320328 4810 scope.go:117] "RemoveContainer" containerID="58c3520dbc61640d15dd4d988ab559bf485c156a465f5c8826aaaa746a873770" Dec 03 06:11:28 crc kubenswrapper[4810]: I1203 06:11:28.357549 4810 scope.go:117] "RemoveContainer" containerID="b65cbb71ee610603bc16586f32dcad3af45a61894ce5c0412af1393e8c6c8374" Dec 03 06:11:28 crc kubenswrapper[4810]: I1203 06:11:28.400142 4810 scope.go:117] "RemoveContainer" containerID="60f1198d60fed1031cdb39a88dba7b0c1abb313d59833b474854be1adda7949f" Dec 03 06:11:28 crc kubenswrapper[4810]: I1203 06:11:28.439586 4810 scope.go:117] "RemoveContainer" containerID="5a8267f2778112568ca98f1b1e1e5587f23df38a1d2c9606bdd2ff58dedbbd79" Dec 03 06:11:28 crc kubenswrapper[4810]: I1203 06:11:28.483471 4810 scope.go:117] "RemoveContainer" containerID="d0f69f43c6fb537893db03308455486ea5f2c35c42f2110383c9d4df98d20b27" Dec 03 06:11:37 crc kubenswrapper[4810]: I1203 06:11:37.378074 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:11:37 crc kubenswrapper[4810]: E1203 06:11:37.379345 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:11:52 crc kubenswrapper[4810]: I1203 06:11:52.069648 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nnphx"] Dec 03 06:11:52 crc kubenswrapper[4810]: I1203 06:11:52.082394 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nnphx"] Dec 03 06:11:52 crc kubenswrapper[4810]: I1203 06:11:52.377360 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:11:52 crc kubenswrapper[4810]: E1203 06:11:52.378101 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:11:52 crc kubenswrapper[4810]: I1203 06:11:52.397211 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3445e1e0-f732-451f-bb47-ad7e6492dfa3" path="/var/lib/kubelet/pods/3445e1e0-f732-451f-bb47-ad7e6492dfa3/volumes" Dec 03 06:12:03 crc kubenswrapper[4810]: I1203 06:12:03.378056 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:12:03 crc kubenswrapper[4810]: E1203 06:12:03.378997 4810 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:12:16 crc kubenswrapper[4810]: I1203 06:12:16.090603 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-cl2k9"] Dec 03 06:12:16 crc kubenswrapper[4810]: I1203 06:12:16.106102 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-cl2k9"] Dec 03 06:12:16 crc kubenswrapper[4810]: I1203 06:12:16.388426 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:12:16 crc kubenswrapper[4810]: E1203 06:12:16.388823 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:12:16 crc kubenswrapper[4810]: I1203 06:12:16.399286 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae78cc18-4ba1-4b54-b0cd-a9569d2281ed" path="/var/lib/kubelet/pods/ae78cc18-4ba1-4b54-b0cd-a9569d2281ed/volumes" Dec 03 06:12:18 crc kubenswrapper[4810]: I1203 06:12:18.355045 4810 generic.go:334] "Generic (PLEG): container finished" podID="4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f" containerID="96bb951b39c5a5a78c18a88df96f112c4ebb423880be5d38e2ee03b8ac99a5f2" exitCode=0 Dec 03 06:12:18 crc kubenswrapper[4810]: I1203 06:12:18.355169 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" event={"ID":"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f","Type":"ContainerDied","Data":"96bb951b39c5a5a78c18a88df96f112c4ebb423880be5d38e2ee03b8ac99a5f2"} Dec 03 06:12:19 crc kubenswrapper[4810]: I1203 06:12:19.867340 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.046702 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7ts2b"] Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.060548 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7ts2b"] Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.070389 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-ssh-key\") pod \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.070639 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrdbx\" (UniqueName: \"kubernetes.io/projected/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-kube-api-access-qrdbx\") pod \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.070951 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-inventory\") pod \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\" (UID: \"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f\") " Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.079572 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-kube-api-access-qrdbx" (OuterVolumeSpecName: "kube-api-access-qrdbx") pod "4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f" (UID: "4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f"). InnerVolumeSpecName "kube-api-access-qrdbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.116316 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f" (UID: "4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.135067 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-inventory" (OuterVolumeSpecName: "inventory") pod "4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f" (UID: "4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.173808 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.173853 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.173867 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrdbx\" (UniqueName: \"kubernetes.io/projected/4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f-kube-api-access-qrdbx\") on node \"crc\" DevicePath \"\"" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.392899 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.417903 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd75096a-f05e-412e-a147-cba8eb1474b9" path="/var/lib/kubelet/pods/dd75096a-f05e-412e-a147-cba8eb1474b9/volumes" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.420415 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k" event={"ID":"4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f","Type":"ContainerDied","Data":"641ea77c20dcd49f8a18851aef6ace45d33c16fb7426c5788d58a6f360bd0099"} Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.420471 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="641ea77c20dcd49f8a18851aef6ace45d33c16fb7426c5788d58a6f360bd0099" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.490795 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6"] Dec 03 06:12:20 crc kubenswrapper[4810]: E1203 06:12:20.491985 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.492020 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.492320 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.493434 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.496505 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.496724 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.496944 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.498115 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.503383 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6"] Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.585222 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.585573 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.585712 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xm2b\" (UniqueName: \"kubernetes.io/projected/5f7c21ae-9d4b-4783-97da-66a73e29790a-kube-api-access-6xm2b\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.687761 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.687894 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.687953 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xm2b\" (UniqueName: \"kubernetes.io/projected/5f7c21ae-9d4b-4783-97da-66a73e29790a-kube-api-access-6xm2b\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.693810 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.696143 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.708191 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xm2b\" (UniqueName: \"kubernetes.io/projected/5f7c21ae-9d4b-4783-97da-66a73e29790a-kube-api-access-6xm2b\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:20 crc kubenswrapper[4810]: I1203 06:12:20.814512 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:21 crc kubenswrapper[4810]: I1203 06:12:21.470897 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6"] Dec 03 06:12:22 crc kubenswrapper[4810]: I1203 06:12:22.429590 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" event={"ID":"5f7c21ae-9d4b-4783-97da-66a73e29790a","Type":"ContainerStarted","Data":"88d0183c59ecf7dbc6ade0e49f3db171afe0ac85cb023174be545f756c447392"} Dec 03 06:12:23 crc kubenswrapper[4810]: I1203 06:12:23.441338 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" event={"ID":"5f7c21ae-9d4b-4783-97da-66a73e29790a","Type":"ContainerStarted","Data":"3e22f7c26d707edf2aeaeec3b8ffcd7d19418f663bd7789f89a4489bb618e425"} Dec 03 06:12:23 crc kubenswrapper[4810]: I1203 06:12:23.472146 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" podStartSLOduration=2.711895469 podStartE2EDuration="3.472122046s" podCreationTimestamp="2025-12-03 06:12:20 +0000 UTC" firstStartedPulling="2025-12-03 06:12:21.478911141 +0000 UTC m=+1865.414371992" lastFinishedPulling="2025-12-03 06:12:22.239137718 +0000 UTC m=+1866.174598569" observedRunningTime="2025-12-03 06:12:23.470956625 +0000 UTC m=+1867.406417466" watchObservedRunningTime="2025-12-03 06:12:23.472122046 +0000 UTC m=+1867.407582887" Dec 03 06:12:28 crc kubenswrapper[4810]: I1203 06:12:28.709166 4810 scope.go:117] "RemoveContainer" containerID="efe26805e0c97ed6a5ae468813450dd7baed46a0fda72b25fdcdabe45537f2e7" Dec 03 06:12:28 crc kubenswrapper[4810]: I1203 06:12:28.777587 4810 scope.go:117] "RemoveContainer" 
containerID="294f048c5ba1c6aa7fcee6ccd7db8fb0f95147b402c87e7441528633ae117b54" Dec 03 06:12:28 crc kubenswrapper[4810]: I1203 06:12:28.845584 4810 scope.go:117] "RemoveContainer" containerID="51ec9842055502fc42451726abce9f64b172d32ae02e7a3e8f4ac3ce41b512e6" Dec 03 06:12:29 crc kubenswrapper[4810]: I1203 06:12:29.519641 4810 generic.go:334] "Generic (PLEG): container finished" podID="5f7c21ae-9d4b-4783-97da-66a73e29790a" containerID="3e22f7c26d707edf2aeaeec3b8ffcd7d19418f663bd7789f89a4489bb618e425" exitCode=0 Dec 03 06:12:29 crc kubenswrapper[4810]: I1203 06:12:29.519775 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" event={"ID":"5f7c21ae-9d4b-4783-97da-66a73e29790a","Type":"ContainerDied","Data":"3e22f7c26d707edf2aeaeec3b8ffcd7d19418f663bd7789f89a4489bb618e425"} Dec 03 06:12:30 crc kubenswrapper[4810]: I1203 06:12:30.378761 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:12:30 crc kubenswrapper[4810]: E1203 06:12:30.379541 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.061552 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.170229 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-ssh-key\") pod \"5f7c21ae-9d4b-4783-97da-66a73e29790a\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.170688 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-inventory\") pod \"5f7c21ae-9d4b-4783-97da-66a73e29790a\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.170914 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xm2b\" (UniqueName: \"kubernetes.io/projected/5f7c21ae-9d4b-4783-97da-66a73e29790a-kube-api-access-6xm2b\") pod \"5f7c21ae-9d4b-4783-97da-66a73e29790a\" (UID: \"5f7c21ae-9d4b-4783-97da-66a73e29790a\") " Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.179297 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f7c21ae-9d4b-4783-97da-66a73e29790a-kube-api-access-6xm2b" (OuterVolumeSpecName: "kube-api-access-6xm2b") pod "5f7c21ae-9d4b-4783-97da-66a73e29790a" (UID: "5f7c21ae-9d4b-4783-97da-66a73e29790a"). InnerVolumeSpecName "kube-api-access-6xm2b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.201390 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5f7c21ae-9d4b-4783-97da-66a73e29790a" (UID: "5f7c21ae-9d4b-4783-97da-66a73e29790a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.210960 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-inventory" (OuterVolumeSpecName: "inventory") pod "5f7c21ae-9d4b-4783-97da-66a73e29790a" (UID: "5f7c21ae-9d4b-4783-97da-66a73e29790a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.274834 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.274880 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f7c21ae-9d4b-4783-97da-66a73e29790a-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.274905 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xm2b\" (UniqueName: \"kubernetes.io/projected/5f7c21ae-9d4b-4783-97da-66a73e29790a-kube-api-access-6xm2b\") on node \"crc\" DevicePath \"\"" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.546796 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.546716 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6" event={"ID":"5f7c21ae-9d4b-4783-97da-66a73e29790a","Type":"ContainerDied","Data":"88d0183c59ecf7dbc6ade0e49f3db171afe0ac85cb023174be545f756c447392"} Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.546957 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88d0183c59ecf7dbc6ade0e49f3db171afe0ac85cb023174be545f756c447392" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.694963 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml"] Dec 03 06:12:31 crc kubenswrapper[4810]: E1203 06:12:31.696471 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f7c21ae-9d4b-4783-97da-66a73e29790a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.696498 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f7c21ae-9d4b-4783-97da-66a73e29790a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.697011 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f7c21ae-9d4b-4783-97da-66a73e29790a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.706838 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.710511 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.710858 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.711422 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.712249 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.734102 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml"] Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.821055 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.821127 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.821241 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cp4h7\" (UniqueName: \"kubernetes.io/projected/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-kube-api-access-cp4h7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.925279 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.925339 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.925454 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cp4h7\" (UniqueName: \"kubernetes.io/projected/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-kube-api-access-cp4h7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: 
\"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.932209 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.932747 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:31 crc kubenswrapper[4810]: I1203 06:12:31.949342 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cp4h7\" (UniqueName: \"kubernetes.io/projected/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-kube-api-access-cp4h7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-6kpml\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:32 crc kubenswrapper[4810]: I1203 06:12:32.041118 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:12:32 crc kubenswrapper[4810]: I1203 06:12:32.639696 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml"] Dec 03 06:12:33 crc kubenswrapper[4810]: I1203 06:12:33.569181 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" event={"ID":"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc","Type":"ContainerStarted","Data":"471601e7bbf1822f53d01fdeeb3bc7d040d519ac7b50658e95ec09f2dbef351c"} Dec 03 06:12:33 crc kubenswrapper[4810]: I1203 06:12:33.569648 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" event={"ID":"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc","Type":"ContainerStarted","Data":"6f8560871db663768f51249b0ea9b3028b93a7c9eccdc2ea74f21d4fa08f622b"} Dec 03 06:12:33 crc kubenswrapper[4810]: I1203 06:12:33.600591 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" podStartSLOduration=2.06838553 podStartE2EDuration="2.60055562s" podCreationTimestamp="2025-12-03 06:12:31 +0000 UTC" firstStartedPulling="2025-12-03 06:12:32.648517058 +0000 UTC m=+1876.583977919" lastFinishedPulling="2025-12-03 06:12:33.180687158 +0000 UTC m=+1877.116148009" observedRunningTime="2025-12-03 06:12:33.589632584 +0000 UTC m=+1877.525093435" watchObservedRunningTime="2025-12-03 06:12:33.60055562 +0000 UTC m=+1877.536016461" Dec 03 06:12:45 crc kubenswrapper[4810]: I1203 06:12:45.377909 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:12:45 crc kubenswrapper[4810]: E1203 06:12:45.379149 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:12:57 crc kubenswrapper[4810]: I1203 06:12:57.378182 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:12:57 crc kubenswrapper[4810]: I1203 06:12:57.913806 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"f0c5fd3db41f9ed0ec1cfbdb47ba058ffd18f513197cccd37a5a7bb10b79784c"} Dec 03 06:13:02 crc kubenswrapper[4810]: I1203 06:13:02.044528 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-f9dsl"] Dec 03 06:13:02 crc kubenswrapper[4810]: I1203 06:13:02.056719 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-f9dsl"] Dec 03 06:13:02 crc kubenswrapper[4810]: I1203 06:13:02.395873 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c" path="/var/lib/kubelet/pods/5e60e2fe-0566-4bc6-bfaf-f1bb33e2216c/volumes" Dec 03 06:13:18 crc kubenswrapper[4810]: I1203 06:13:18.181474 4810 generic.go:334] "Generic (PLEG): container finished" podID="04e18cd1-868a-4d9c-882a-c1af0ef1f4dc" containerID="471601e7bbf1822f53d01fdeeb3bc7d040d519ac7b50658e95ec09f2dbef351c" exitCode=0 Dec 03 06:13:18 crc kubenswrapper[4810]: I1203 06:13:18.181585 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" event={"ID":"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc","Type":"ContainerDied","Data":"471601e7bbf1822f53d01fdeeb3bc7d040d519ac7b50658e95ec09f2dbef351c"} Dec 03 06:13:19 crc kubenswrapper[4810]: I1203 06:13:19.813214 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:13:19 crc kubenswrapper[4810]: I1203 06:13:19.964235 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cp4h7\" (UniqueName: \"kubernetes.io/projected/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-kube-api-access-cp4h7\") pod \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " Dec 03 06:13:19 crc kubenswrapper[4810]: I1203 06:13:19.964317 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-inventory\") pod \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " Dec 03 06:13:19 crc kubenswrapper[4810]: I1203 06:13:19.964361 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-ssh-key\") pod \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\" (UID: \"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc\") " Dec 03 06:13:19 crc kubenswrapper[4810]: I1203 06:13:19.975753 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-kube-api-access-cp4h7" (OuterVolumeSpecName: "kube-api-access-cp4h7") pod "04e18cd1-868a-4d9c-882a-c1af0ef1f4dc" (UID: "04e18cd1-868a-4d9c-882a-c1af0ef1f4dc"). InnerVolumeSpecName "kube-api-access-cp4h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:13:19 crc kubenswrapper[4810]: I1203 06:13:19.995133 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-inventory" (OuterVolumeSpecName: "inventory") pod "04e18cd1-868a-4d9c-882a-c1af0ef1f4dc" (UID: "04e18cd1-868a-4d9c-882a-c1af0ef1f4dc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:13:19 crc kubenswrapper[4810]: I1203 06:13:19.996842 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "04e18cd1-868a-4d9c-882a-c1af0ef1f4dc" (UID: "04e18cd1-868a-4d9c-882a-c1af0ef1f4dc"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.067318 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cp4h7\" (UniqueName: \"kubernetes.io/projected/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-kube-api-access-cp4h7\") on node \"crc\" DevicePath \"\"" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.067404 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.067430 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04e18cd1-868a-4d9c-882a-c1af0ef1f4dc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.214637 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" event={"ID":"04e18cd1-868a-4d9c-882a-c1af0ef1f4dc","Type":"ContainerDied","Data":"6f8560871db663768f51249b0ea9b3028b93a7c9eccdc2ea74f21d4fa08f622b"} Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.214700 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f8560871db663768f51249b0ea9b3028b93a7c9eccdc2ea74f21d4fa08f622b" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.214781 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-6kpml" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.358492 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl"] Dec 03 06:13:20 crc kubenswrapper[4810]: E1203 06:13:20.359219 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04e18cd1-868a-4d9c-882a-c1af0ef1f4dc" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.359250 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="04e18cd1-868a-4d9c-882a-c1af0ef1f4dc" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.359601 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="04e18cd1-868a-4d9c-882a-c1af0ef1f4dc" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.360625 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.363900 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.364717 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.364859 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.367173 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.438020 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl"] Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.478307 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg7zp\" (UniqueName: \"kubernetes.io/projected/b303053e-72d8-44d5-8766-d83b7fcba87a-kube-api-access-cg7zp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.479454 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.479543 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.582221 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg7zp\" (UniqueName: \"kubernetes.io/projected/b303053e-72d8-44d5-8766-d83b7fcba87a-kube-api-access-cg7zp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.582385 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.582460 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" 
(UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.588004 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.590535 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.603579 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg7zp\" (UniqueName: \"kubernetes.io/projected/b303053e-72d8-44d5-8766-d83b7fcba87a-kube-api-access-cg7zp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-x76jl\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.689790 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.724144 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ksg72"] Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.727577 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.744277 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ksg72"] Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.788525 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-utilities\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.788618 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-catalog-content\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.788651 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmrtz\" (UniqueName: \"kubernetes.io/projected/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-kube-api-access-xmrtz\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.894239 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-utilities\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.895576 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-catalog-content\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.895638 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmrtz\" (UniqueName: \"kubernetes.io/projected/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-kube-api-access-xmrtz\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.896442 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-catalog-content\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.896867 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-utilities\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:20 crc kubenswrapper[4810]: I1203 06:13:20.926527 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xmrtz\" (UniqueName: \"kubernetes.io/projected/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-kube-api-access-xmrtz\") pod \"community-operators-ksg72\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:21 crc kubenswrapper[4810]: I1203 06:13:21.110973 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:21 crc kubenswrapper[4810]: I1203 06:13:21.325205 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl"] Dec 03 06:13:21 crc kubenswrapper[4810]: I1203 06:13:21.633090 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ksg72"] Dec 03 06:13:21 crc kubenswrapper[4810]: W1203 06:13:21.638212 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f9b9baa_b9c2_4003_9a65_09e9d9f9a0e6.slice/crio-841b4812570027dcb42bc309b350d9a19dc159785ba14eb22e9f0b086175058b WatchSource:0}: Error finding container 841b4812570027dcb42bc309b350d9a19dc159785ba14eb22e9f0b086175058b: Status 404 returned error can't find the container with id 841b4812570027dcb42bc309b350d9a19dc159785ba14eb22e9f0b086175058b Dec 03 06:13:22 crc kubenswrapper[4810]: I1203 06:13:22.240600 4810 generic.go:334] "Generic (PLEG): container finished" podID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerID="a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb" exitCode=0 Dec 03 06:13:22 crc kubenswrapper[4810]: I1203 06:13:22.240688 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ksg72" event={"ID":"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6","Type":"ContainerDied","Data":"a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb"} Dec 03 06:13:22 crc kubenswrapper[4810]: I1203 06:13:22.241186 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ksg72" event={"ID":"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6","Type":"ContainerStarted","Data":"841b4812570027dcb42bc309b350d9a19dc159785ba14eb22e9f0b086175058b"} Dec 03 06:13:22 crc kubenswrapper[4810]: I1203 06:13:22.245055 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" event={"ID":"b303053e-72d8-44d5-8766-d83b7fcba87a","Type":"ContainerStarted","Data":"e3913f181143cdd5637213ef90bf2ace61d0d52b3de9dc027afd2386bfa7be0a"} Dec 03 06:13:22 crc kubenswrapper[4810]: I1203 06:13:22.245124 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" event={"ID":"b303053e-72d8-44d5-8766-d83b7fcba87a","Type":"ContainerStarted","Data":"6f0205c0d561a3539f0a8d3e8508efe14d6e370923031b2c3f98cdfd21de433a"} Dec 03 06:13:22 crc kubenswrapper[4810]: I1203 06:13:22.296261 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" podStartSLOduration=1.7740996180000002 podStartE2EDuration="2.296231975s" podCreationTimestamp="2025-12-03 06:13:20 +0000 UTC" firstStartedPulling="2025-12-03 06:13:21.361201599 +0000 UTC m=+1925.296662440" lastFinishedPulling="2025-12-03 06:13:21.883333956 +0000 UTC m=+1925.818794797" observedRunningTime="2025-12-03 06:13:22.289772756 +0000 UTC 
m=+1926.225233597" watchObservedRunningTime="2025-12-03 06:13:22.296231975 +0000 UTC m=+1926.231692816" Dec 03 06:13:23 crc kubenswrapper[4810]: I1203 06:13:23.258954 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ksg72" event={"ID":"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6","Type":"ContainerStarted","Data":"6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1"} Dec 03 06:13:24 crc kubenswrapper[4810]: I1203 06:13:24.277791 4810 generic.go:334] "Generic (PLEG): container finished" podID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerID="6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1" exitCode=0 Dec 03 06:13:24 crc kubenswrapper[4810]: I1203 06:13:24.277907 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ksg72" event={"ID":"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6","Type":"ContainerDied","Data":"6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1"} Dec 03 06:13:25 crc kubenswrapper[4810]: I1203 06:13:25.295875 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ksg72" event={"ID":"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6","Type":"ContainerStarted","Data":"8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01"} Dec 03 06:13:25 crc kubenswrapper[4810]: I1203 06:13:25.330103 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ksg72" podStartSLOduration=2.711769604 podStartE2EDuration="5.330085028s" podCreationTimestamp="2025-12-03 06:13:20 +0000 UTC" firstStartedPulling="2025-12-03 06:13:22.243385418 +0000 UTC m=+1926.178846269" lastFinishedPulling="2025-12-03 06:13:24.861700842 +0000 UTC m=+1928.797161693" observedRunningTime="2025-12-03 06:13:25.32409438 +0000 UTC m=+1929.259555231" watchObservedRunningTime="2025-12-03 06:13:25.330085028 +0000 UTC m=+1929.265545869" Dec 03 06:13:29 crc kubenswrapper[4810]: I1203 06:13:29.004373 4810 scope.go:117] "RemoveContainer" containerID="9cac7fa0e9c4445c5e6a45e87197a4fa2f6c3a5d8f1e803c7de595cc29e792f3" Dec 03 06:13:31 crc kubenswrapper[4810]: I1203 06:13:31.112794 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:31 crc kubenswrapper[4810]: I1203 06:13:31.114362 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:31 crc kubenswrapper[4810]: I1203 06:13:31.166504 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:31 crc kubenswrapper[4810]: I1203 06:13:31.425202 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:32 crc kubenswrapper[4810]: I1203 06:13:32.493714 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ksg72"] Dec 03 06:13:33 crc kubenswrapper[4810]: I1203 06:13:33.413770 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ksg72" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerName="registry-server" containerID="cri-o://8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01" gracePeriod=2 Dec 03 06:13:33 crc kubenswrapper[4810]: I1203 06:13:33.920502 4810 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.028345 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-catalog-content\") pod \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.028400 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-utilities\") pod \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.028638 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmrtz\" (UniqueName: \"kubernetes.io/projected/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-kube-api-access-xmrtz\") pod \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\" (UID: \"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6\") " Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.029630 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-utilities" (OuterVolumeSpecName: "utilities") pod "2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" (UID: "2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.030307 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.036585 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-kube-api-access-xmrtz" (OuterVolumeSpecName: "kube-api-access-xmrtz") pod "2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" (UID: "2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6"). InnerVolumeSpecName "kube-api-access-xmrtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.093510 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" (UID: "2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.133405 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmrtz\" (UniqueName: \"kubernetes.io/projected/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-kube-api-access-xmrtz\") on node \"crc\" DevicePath \"\"" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.133551 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.426776 4810 generic.go:334] "Generic (PLEG): container finished" podID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerID="8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01" exitCode=0 Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.426850 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ksg72" event={"ID":"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6","Type":"ContainerDied","Data":"8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01"} Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.426896 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ksg72" event={"ID":"2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6","Type":"ContainerDied","Data":"841b4812570027dcb42bc309b350d9a19dc159785ba14eb22e9f0b086175058b"} Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.426925 4810 scope.go:117] "RemoveContainer" containerID="8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.427178 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ksg72" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.459505 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ksg72"] Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.466711 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ksg72"] Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.484962 4810 scope.go:117] "RemoveContainer" containerID="6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.520837 4810 scope.go:117] "RemoveContainer" containerID="a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.560997 4810 scope.go:117] "RemoveContainer" containerID="8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01" Dec 03 06:13:34 crc kubenswrapper[4810]: E1203 06:13:34.561991 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01\": container with ID starting with 8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01 not found: ID does not exist" containerID="8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.562066 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01"} err="failed to get container status \"8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01\": rpc error: code = NotFound desc = could not find container \"8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01\": container with ID starting with 8ff622682a7f5c69d33692905326eb92faad78dc467252190105a1db1e0cef01 not found: ID does not exist" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.562115 4810 scope.go:117] "RemoveContainer" containerID="6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1" Dec 03 06:13:34 crc kubenswrapper[4810]: E1203 06:13:34.562527 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1\": container with ID starting with 6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1 not found: ID does not exist" containerID="6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.562586 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1"} err="failed to get container status \"6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1\": rpc error: code = NotFound desc = could not find container \"6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1\": container with ID starting with 6295598e4b5f5a705344dbf65ee602319d3f1c8d5ee6f5f23bfc3e392be8c7e1 not found: ID does not exist" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.562625 4810 scope.go:117] "RemoveContainer" containerID="a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb" Dec 03 06:13:34 crc kubenswrapper[4810]: E1203 06:13:34.563009 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb\": container with ID starting with a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb not found: ID does not exist" containerID="a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb" Dec 03 06:13:34 crc kubenswrapper[4810]: I1203 06:13:34.563071 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb"} err="failed to get container status \"a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb\": rpc error: code = NotFound desc = could not find container \"a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb\": container with ID starting with a21db096d08b096dda01ee579ad31ab33c0337b024872f39942d658a6aa23dfb not found: ID does not exist" Dec 03 06:13:36 crc kubenswrapper[4810]: I1203 06:13:36.398009 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" path="/var/lib/kubelet/pods/2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6/volumes" Dec 03 06:14:23 crc kubenswrapper[4810]: I1203 06:14:23.102830 4810 generic.go:334] "Generic (PLEG): container finished" podID="b303053e-72d8-44d5-8766-d83b7fcba87a" containerID="e3913f181143cdd5637213ef90bf2ace61d0d52b3de9dc027afd2386bfa7be0a" exitCode=0 Dec 03 06:14:23 crc kubenswrapper[4810]: I1203 06:14:23.103913 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" event={"ID":"b303053e-72d8-44d5-8766-d83b7fcba87a","Type":"ContainerDied","Data":"e3913f181143cdd5637213ef90bf2ace61d0d52b3de9dc027afd2386bfa7be0a"} Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.712599 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.867049 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-ssh-key\") pod \"b303053e-72d8-44d5-8766-d83b7fcba87a\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.867387 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-inventory\") pod \"b303053e-72d8-44d5-8766-d83b7fcba87a\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.867510 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg7zp\" (UniqueName: \"kubernetes.io/projected/b303053e-72d8-44d5-8766-d83b7fcba87a-kube-api-access-cg7zp\") pod \"b303053e-72d8-44d5-8766-d83b7fcba87a\" (UID: \"b303053e-72d8-44d5-8766-d83b7fcba87a\") " Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.879347 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b303053e-72d8-44d5-8766-d83b7fcba87a-kube-api-access-cg7zp" (OuterVolumeSpecName: "kube-api-access-cg7zp") pod "b303053e-72d8-44d5-8766-d83b7fcba87a" (UID: "b303053e-72d8-44d5-8766-d83b7fcba87a"). InnerVolumeSpecName "kube-api-access-cg7zp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.901203 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b303053e-72d8-44d5-8766-d83b7fcba87a" (UID: "b303053e-72d8-44d5-8766-d83b7fcba87a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.926036 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-inventory" (OuterVolumeSpecName: "inventory") pod "b303053e-72d8-44d5-8766-d83b7fcba87a" (UID: "b303053e-72d8-44d5-8766-d83b7fcba87a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.971522 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.971979 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b303053e-72d8-44d5-8766-d83b7fcba87a-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:24 crc kubenswrapper[4810]: I1203 06:14:24.972003 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg7zp\" (UniqueName: \"kubernetes.io/projected/b303053e-72d8-44d5-8766-d83b7fcba87a-kube-api-access-cg7zp\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.128978 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" event={"ID":"b303053e-72d8-44d5-8766-d83b7fcba87a","Type":"ContainerDied","Data":"6f0205c0d561a3539f0a8d3e8508efe14d6e370923031b2c3f98cdfd21de433a"} Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.129042 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f0205c0d561a3539f0a8d3e8508efe14d6e370923031b2c3f98cdfd21de433a" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.129114 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-x76jl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.263848 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hnbdl"] Dec 03 06:14:25 crc kubenswrapper[4810]: E1203 06:14:25.264536 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b303053e-72d8-44d5-8766-d83b7fcba87a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.264572 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b303053e-72d8-44d5-8766-d83b7fcba87a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:14:25 crc kubenswrapper[4810]: E1203 06:14:25.264594 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerName="extract-content" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.264610 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerName="extract-content" Dec 03 06:14:25 crc kubenswrapper[4810]: E1203 06:14:25.264636 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerName="registry-server" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.264649 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerName="registry-server" Dec 03 06:14:25 crc kubenswrapper[4810]: E1203 06:14:25.264673 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerName="extract-utilities" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.264691 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerName="extract-utilities" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.265087 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f9b9baa-b9c2-4003-9a65-09e9d9f9a0e6" containerName="registry-server" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.265131 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b303053e-72d8-44d5-8766-d83b7fcba87a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.266272 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.269140 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.269538 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.271607 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.272819 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.294855 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hnbdl"] Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.388805 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.389001 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whh5s\" (UniqueName: \"kubernetes.io/projected/0200a8b5-f03d-494e-9741-987a521ea388-kube-api-access-whh5s\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.389053 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.491569 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whh5s\" (UniqueName: \"kubernetes.io/projected/0200a8b5-f03d-494e-9741-987a521ea388-kube-api-access-whh5s\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.491623 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.491774 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc 
kubenswrapper[4810]: I1203 06:14:25.498615 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.498647 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.524887 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whh5s\" (UniqueName: \"kubernetes.io/projected/0200a8b5-f03d-494e-9741-987a521ea388-kube-api-access-whh5s\") pod \"ssh-known-hosts-edpm-deployment-hnbdl\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:25 crc kubenswrapper[4810]: I1203 06:14:25.598086 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:26 crc kubenswrapper[4810]: I1203 06:14:26.056783 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hnbdl"] Dec 03 06:14:26 crc kubenswrapper[4810]: I1203 06:14:26.064577 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:14:26 crc kubenswrapper[4810]: I1203 06:14:26.140990 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" event={"ID":"0200a8b5-f03d-494e-9741-987a521ea388","Type":"ContainerStarted","Data":"1ec38e1fafd69c265ce15b628076e79177190c6eda19216c395fa12a1413e700"} Dec 03 06:14:27 crc kubenswrapper[4810]: I1203 06:14:27.151961 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" event={"ID":"0200a8b5-f03d-494e-9741-987a521ea388","Type":"ContainerStarted","Data":"9d6d98b5a7c33e50ee9678263adc5310bf99c5977894093588e26455fc6d35d1"} Dec 03 06:14:27 crc kubenswrapper[4810]: I1203 06:14:27.182243 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" podStartSLOduration=1.62463485 podStartE2EDuration="2.182221991s" podCreationTimestamp="2025-12-03 06:14:25 +0000 UTC" firstStartedPulling="2025-12-03 06:14:26.064299496 +0000 UTC m=+1989.999760337" lastFinishedPulling="2025-12-03 06:14:26.621886607 +0000 UTC m=+1990.557347478" observedRunningTime="2025-12-03 06:14:27.176275915 +0000 UTC m=+1991.111736756" watchObservedRunningTime="2025-12-03 06:14:27.182221991 +0000 UTC m=+1991.117682822" Dec 03 06:14:35 crc kubenswrapper[4810]: I1203 06:14:35.232207 4810 generic.go:334] "Generic (PLEG): container finished" podID="0200a8b5-f03d-494e-9741-987a521ea388" containerID="9d6d98b5a7c33e50ee9678263adc5310bf99c5977894093588e26455fc6d35d1" exitCode=0 Dec 03 06:14:35 crc kubenswrapper[4810]: I1203 06:14:35.232283 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" 
event={"ID":"0200a8b5-f03d-494e-9741-987a521ea388","Type":"ContainerDied","Data":"9d6d98b5a7c33e50ee9678263adc5310bf99c5977894093588e26455fc6d35d1"} Dec 03 06:14:36 crc kubenswrapper[4810]: I1203 06:14:36.682717 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:36 crc kubenswrapper[4810]: I1203 06:14:36.902074 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whh5s\" (UniqueName: \"kubernetes.io/projected/0200a8b5-f03d-494e-9741-987a521ea388-kube-api-access-whh5s\") pod \"0200a8b5-f03d-494e-9741-987a521ea388\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " Dec 03 06:14:36 crc kubenswrapper[4810]: I1203 06:14:36.902486 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-inventory-0\") pod \"0200a8b5-f03d-494e-9741-987a521ea388\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " Dec 03 06:14:36 crc kubenswrapper[4810]: I1203 06:14:36.902838 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-ssh-key-openstack-edpm-ipam\") pod \"0200a8b5-f03d-494e-9741-987a521ea388\" (UID: \"0200a8b5-f03d-494e-9741-987a521ea388\") " Dec 03 06:14:36 crc kubenswrapper[4810]: I1203 06:14:36.952097 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0200a8b5-f03d-494e-9741-987a521ea388-kube-api-access-whh5s" (OuterVolumeSpecName: "kube-api-access-whh5s") pod "0200a8b5-f03d-494e-9741-987a521ea388" (UID: "0200a8b5-f03d-494e-9741-987a521ea388"). InnerVolumeSpecName "kube-api-access-whh5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.004043 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "0200a8b5-f03d-494e-9741-987a521ea388" (UID: "0200a8b5-f03d-494e-9741-987a521ea388"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.005695 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whh5s\" (UniqueName: \"kubernetes.io/projected/0200a8b5-f03d-494e-9741-987a521ea388-kube-api-access-whh5s\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.005721 4810 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.024837 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0200a8b5-f03d-494e-9741-987a521ea388" (UID: "0200a8b5-f03d-494e-9741-987a521ea388"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.107612 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0200a8b5-f03d-494e-9741-987a521ea388-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.257101 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" event={"ID":"0200a8b5-f03d-494e-9741-987a521ea388","Type":"ContainerDied","Data":"1ec38e1fafd69c265ce15b628076e79177190c6eda19216c395fa12a1413e700"} Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.257506 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ec38e1fafd69c265ce15b628076e79177190c6eda19216c395fa12a1413e700" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.257224 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hnbdl" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.359409 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp"] Dec 03 06:14:37 crc kubenswrapper[4810]: E1203 06:14:37.359891 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0200a8b5-f03d-494e-9741-987a521ea388" containerName="ssh-known-hosts-edpm-deployment" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.359912 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0200a8b5-f03d-494e-9741-987a521ea388" containerName="ssh-known-hosts-edpm-deployment" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.360124 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0200a8b5-f03d-494e-9741-987a521ea388" containerName="ssh-known-hosts-edpm-deployment" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.360997 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.366618 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.366694 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.366768 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.370719 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.395493 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp"] Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.516141 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.516224 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mssxf\" (UniqueName: \"kubernetes.io/projected/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-kube-api-access-mssxf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.517050 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.622112 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.622180 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.622210 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mssxf\" (UniqueName: \"kubernetes.io/projected/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-kube-api-access-mssxf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.626226 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.626531 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.641437 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mssxf\" (UniqueName: \"kubernetes.io/projected/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-kube-api-access-mssxf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c66cp\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:37 crc kubenswrapper[4810]: I1203 06:14:37.681985 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:38 crc kubenswrapper[4810]: I1203 06:14:38.236905 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp"] Dec 03 06:14:38 crc kubenswrapper[4810]: I1203 06:14:38.271023 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" event={"ID":"a2097290-7aae-478a-9bd0-7d8c5a32b4d1","Type":"ContainerStarted","Data":"1ea0774575722021fa789fa855e9c161e6192d30e4ff692a459f7f0c87be9944"} Dec 03 06:14:39 crc kubenswrapper[4810]: I1203 06:14:39.286791 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" event={"ID":"a2097290-7aae-478a-9bd0-7d8c5a32b4d1","Type":"ContainerStarted","Data":"35b9fbe85d6a3317445ffcb01d7049399ae4aa9051bbbbfe5f2726e7780dbec8"} Dec 03 06:14:39 crc kubenswrapper[4810]: I1203 06:14:39.328085 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" podStartSLOduration=1.758046406 podStartE2EDuration="2.328050254s" podCreationTimestamp="2025-12-03 06:14:37 +0000 UTC" firstStartedPulling="2025-12-03 06:14:38.252370429 +0000 UTC m=+2002.187831280" lastFinishedPulling="2025-12-03 06:14:38.822374287 +0000 UTC m=+2002.757835128" observedRunningTime="2025-12-03 06:14:39.312044383 +0000 UTC m=+2003.247505274" watchObservedRunningTime="2025-12-03 06:14:39.328050254 +0000 UTC m=+2003.263511115" Dec 03 06:14:48 crc kubenswrapper[4810]: I1203 06:14:48.429128 4810 generic.go:334] "Generic (PLEG): container finished" podID="a2097290-7aae-478a-9bd0-7d8c5a32b4d1" containerID="35b9fbe85d6a3317445ffcb01d7049399ae4aa9051bbbbfe5f2726e7780dbec8" exitCode=0 Dec 03 06:14:48 crc kubenswrapper[4810]: I1203 06:14:48.429291 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" 
event={"ID":"a2097290-7aae-478a-9bd0-7d8c5a32b4d1","Type":"ContainerDied","Data":"35b9fbe85d6a3317445ffcb01d7049399ae4aa9051bbbbfe5f2726e7780dbec8"} Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.084599 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.255969 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mssxf\" (UniqueName: \"kubernetes.io/projected/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-kube-api-access-mssxf\") pod \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.256105 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-inventory\") pod \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.256584 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-ssh-key\") pod \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\" (UID: \"a2097290-7aae-478a-9bd0-7d8c5a32b4d1\") " Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.270074 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-kube-api-access-mssxf" (OuterVolumeSpecName: "kube-api-access-mssxf") pod "a2097290-7aae-478a-9bd0-7d8c5a32b4d1" (UID: "a2097290-7aae-478a-9bd0-7d8c5a32b4d1"). InnerVolumeSpecName "kube-api-access-mssxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.305932 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-inventory" (OuterVolumeSpecName: "inventory") pod "a2097290-7aae-478a-9bd0-7d8c5a32b4d1" (UID: "a2097290-7aae-478a-9bd0-7d8c5a32b4d1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.306871 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a2097290-7aae-478a-9bd0-7d8c5a32b4d1" (UID: "a2097290-7aae-478a-9bd0-7d8c5a32b4d1"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.361080 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mssxf\" (UniqueName: \"kubernetes.io/projected/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-kube-api-access-mssxf\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.361648 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.361659 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2097290-7aae-478a-9bd0-7d8c5a32b4d1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.455434 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" event={"ID":"a2097290-7aae-478a-9bd0-7d8c5a32b4d1","Type":"ContainerDied","Data":"1ea0774575722021fa789fa855e9c161e6192d30e4ff692a459f7f0c87be9944"} Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.456049 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ea0774575722021fa789fa855e9c161e6192d30e4ff692a459f7f0c87be9944" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.455590 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c66cp" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.542333 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c"] Dec 03 06:14:50 crc kubenswrapper[4810]: E1203 06:14:50.542959 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2097290-7aae-478a-9bd0-7d8c5a32b4d1" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.542981 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2097290-7aae-478a-9bd0-7d8c5a32b4d1" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.543268 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2097290-7aae-478a-9bd0-7d8c5a32b4d1" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.544329 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.548168 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.549430 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.549813 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.551383 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.555244 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c"] Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.695723 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.695879 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.695934 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zqbg\" (UniqueName: \"kubernetes.io/projected/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-kube-api-access-8zqbg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.797233 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.797352 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.797400 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zqbg\" (UniqueName: \"kubernetes.io/projected/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-kube-api-access-8zqbg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: 
\"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.802290 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.803343 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.815556 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zqbg\" (UniqueName: \"kubernetes.io/projected/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-kube-api-access-8zqbg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:50 crc kubenswrapper[4810]: I1203 06:14:50.903211 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:14:51 crc kubenswrapper[4810]: I1203 06:14:51.548513 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c"] Dec 03 06:14:51 crc kubenswrapper[4810]: W1203 06:14:51.573050 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod841fc3ce_d8a7_4cb2_89ab_31cae73ce18d.slice/crio-68914f1da8e9e5e5ab0f67323a8b16ccf7470ec02fecf7c12e37d96d737caa2c WatchSource:0}: Error finding container 68914f1da8e9e5e5ab0f67323a8b16ccf7470ec02fecf7c12e37d96d737caa2c: Status 404 returned error can't find the container with id 68914f1da8e9e5e5ab0f67323a8b16ccf7470ec02fecf7c12e37d96d737caa2c Dec 03 06:14:52 crc kubenswrapper[4810]: I1203 06:14:52.480118 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" event={"ID":"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d","Type":"ContainerStarted","Data":"f631ef08821bd020e2816565fbb06df53dcfd74ebf87e7d8ef2938e02190eef1"} Dec 03 06:14:52 crc kubenswrapper[4810]: I1203 06:14:52.480530 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" event={"ID":"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d","Type":"ContainerStarted","Data":"68914f1da8e9e5e5ab0f67323a8b16ccf7470ec02fecf7c12e37d96d737caa2c"} Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.217993 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" podStartSLOduration=6.716592382 podStartE2EDuration="7.217962366s" podCreationTimestamp="2025-12-03 06:14:50 +0000 UTC" firstStartedPulling="2025-12-03 06:14:51.577083289 +0000 UTC m=+2015.512544130" lastFinishedPulling="2025-12-03 06:14:52.078453273 +0000 UTC m=+2016.013914114" observedRunningTime="2025-12-03 06:14:52.500360507 +0000 UTC m=+2016.435821358" 
watchObservedRunningTime="2025-12-03 06:14:57.217962366 +0000 UTC m=+2021.153423207" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.230754 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5jk7q"] Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.233095 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.254196 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5jk7q"] Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.353941 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x642h\" (UniqueName: \"kubernetes.io/projected/e3d521e8-5b73-492d-958b-f627f2391efc-kube-api-access-x642h\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.354989 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-utilities\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.355097 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-catalog-content\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.458240 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x642h\" (UniqueName: \"kubernetes.io/projected/e3d521e8-5b73-492d-958b-f627f2391efc-kube-api-access-x642h\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.459226 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-utilities\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.458408 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-utilities\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.459375 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-catalog-content\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.459781 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-catalog-content\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.489127 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x642h\" (UniqueName: \"kubernetes.io/projected/e3d521e8-5b73-492d-958b-f627f2391efc-kube-api-access-x642h\") pod \"redhat-operators-5jk7q\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:57 crc kubenswrapper[4810]: I1203 06:14:57.556983 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:14:58 crc kubenswrapper[4810]: I1203 06:14:58.123892 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5jk7q"] Dec 03 06:14:58 crc kubenswrapper[4810]: W1203 06:14:58.135056 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3d521e8_5b73_492d_958b_f627f2391efc.slice/crio-a5b4c3b6323905c5c39c6c13f9484a6ed2fdc1b55b3781127b6846ab46323a81 WatchSource:0}: Error finding container a5b4c3b6323905c5c39c6c13f9484a6ed2fdc1b55b3781127b6846ab46323a81: Status 404 returned error can't find the container with id a5b4c3b6323905c5c39c6c13f9484a6ed2fdc1b55b3781127b6846ab46323a81 Dec 03 06:14:58 crc kubenswrapper[4810]: I1203 06:14:58.547881 4810 generic.go:334] "Generic (PLEG): container finished" podID="e3d521e8-5b73-492d-958b-f627f2391efc" containerID="5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9" exitCode=0 Dec 03 06:14:58 crc kubenswrapper[4810]: I1203 06:14:58.547948 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jk7q" event={"ID":"e3d521e8-5b73-492d-958b-f627f2391efc","Type":"ContainerDied","Data":"5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9"} Dec 03 06:14:58 crc kubenswrapper[4810]: I1203 06:14:58.547982 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jk7q" event={"ID":"e3d521e8-5b73-492d-958b-f627f2391efc","Type":"ContainerStarted","Data":"a5b4c3b6323905c5c39c6c13f9484a6ed2fdc1b55b3781127b6846ab46323a81"} Dec 03 06:14:59 crc kubenswrapper[4810]: I1203 06:14:59.563018 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jk7q" event={"ID":"e3d521e8-5b73-492d-958b-f627f2391efc","Type":"ContainerStarted","Data":"b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6"} Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.140961 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp"] Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.142707 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.146330 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.146364 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.165243 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp"] Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.267976 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6705d396-c70c-4160-83b5-ded3ceef591e-secret-volume\") pod \"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.268109 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6705d396-c70c-4160-83b5-ded3ceef591e-config-volume\") pod \"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.268153 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6m9j\" (UniqueName: \"kubernetes.io/projected/6705d396-c70c-4160-83b5-ded3ceef591e-kube-api-access-g6m9j\") pod \"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.370453 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6705d396-c70c-4160-83b5-ded3ceef591e-secret-volume\") pod \"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.370558 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6705d396-c70c-4160-83b5-ded3ceef591e-config-volume\") pod \"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.370587 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6m9j\" (UniqueName: \"kubernetes.io/projected/6705d396-c70c-4160-83b5-ded3ceef591e-kube-api-access-g6m9j\") pod \"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.371920 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6705d396-c70c-4160-83b5-ded3ceef591e-config-volume\") pod 
\"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.381887 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6705d396-c70c-4160-83b5-ded3ceef591e-secret-volume\") pod \"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.388941 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6m9j\" (UniqueName: \"kubernetes.io/projected/6705d396-c70c-4160-83b5-ded3ceef591e-kube-api-access-g6m9j\") pod \"collect-profiles-29412375-g9zcp\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.465765 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:00 crc kubenswrapper[4810]: I1203 06:15:00.955461 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp"] Dec 03 06:15:01 crc kubenswrapper[4810]: I1203 06:15:01.588040 4810 generic.go:334] "Generic (PLEG): container finished" podID="6705d396-c70c-4160-83b5-ded3ceef591e" containerID="938246c49c78e60dd21dde994d0a564ebca4d10ff16861206703cea066c37df8" exitCode=0 Dec 03 06:15:01 crc kubenswrapper[4810]: I1203 06:15:01.588131 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" event={"ID":"6705d396-c70c-4160-83b5-ded3ceef591e","Type":"ContainerDied","Data":"938246c49c78e60dd21dde994d0a564ebca4d10ff16861206703cea066c37df8"} Dec 03 06:15:01 crc kubenswrapper[4810]: I1203 06:15:01.589905 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" event={"ID":"6705d396-c70c-4160-83b5-ded3ceef591e","Type":"ContainerStarted","Data":"790fbc9224d0bafe12c6923d0123f12f3b0c0e1f47682d4ae0fc944f70f9440a"} Dec 03 06:15:01 crc kubenswrapper[4810]: I1203 06:15:01.595603 4810 generic.go:334] "Generic (PLEG): container finished" podID="e3d521e8-5b73-492d-958b-f627f2391efc" containerID="b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6" exitCode=0 Dec 03 06:15:01 crc kubenswrapper[4810]: I1203 06:15:01.595638 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jk7q" event={"ID":"e3d521e8-5b73-492d-958b-f627f2391efc","Type":"ContainerDied","Data":"b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6"} Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.014653 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.136419 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6705d396-c70c-4160-83b5-ded3ceef591e-secret-volume\") pod \"6705d396-c70c-4160-83b5-ded3ceef591e\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.136838 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6m9j\" (UniqueName: \"kubernetes.io/projected/6705d396-c70c-4160-83b5-ded3ceef591e-kube-api-access-g6m9j\") pod \"6705d396-c70c-4160-83b5-ded3ceef591e\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.137013 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6705d396-c70c-4160-83b5-ded3ceef591e-config-volume\") pod \"6705d396-c70c-4160-83b5-ded3ceef591e\" (UID: \"6705d396-c70c-4160-83b5-ded3ceef591e\") " Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.137725 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6705d396-c70c-4160-83b5-ded3ceef591e-config-volume" (OuterVolumeSpecName: "config-volume") pod "6705d396-c70c-4160-83b5-ded3ceef591e" (UID: "6705d396-c70c-4160-83b5-ded3ceef591e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.138157 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6705d396-c70c-4160-83b5-ded3ceef591e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.144491 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6705d396-c70c-4160-83b5-ded3ceef591e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6705d396-c70c-4160-83b5-ded3ceef591e" (UID: "6705d396-c70c-4160-83b5-ded3ceef591e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.161136 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6705d396-c70c-4160-83b5-ded3ceef591e-kube-api-access-g6m9j" (OuterVolumeSpecName: "kube-api-access-g6m9j") pod "6705d396-c70c-4160-83b5-ded3ceef591e" (UID: "6705d396-c70c-4160-83b5-ded3ceef591e"). InnerVolumeSpecName "kube-api-access-g6m9j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.239915 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6705d396-c70c-4160-83b5-ded3ceef591e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.239957 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6m9j\" (UniqueName: \"kubernetes.io/projected/6705d396-c70c-4160-83b5-ded3ceef591e-kube-api-access-g6m9j\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.617090 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" event={"ID":"6705d396-c70c-4160-83b5-ded3ceef591e","Type":"ContainerDied","Data":"790fbc9224d0bafe12c6923d0123f12f3b0c0e1f47682d4ae0fc944f70f9440a"} Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.617135 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="790fbc9224d0bafe12c6923d0123f12f3b0c0e1f47682d4ae0fc944f70f9440a" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.617166 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp" Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.621689 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jk7q" event={"ID":"e3d521e8-5b73-492d-958b-f627f2391efc","Type":"ContainerStarted","Data":"d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433"} Dec 03 06:15:03 crc kubenswrapper[4810]: I1203 06:15:03.661575 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5jk7q" podStartSLOduration=2.74640804 podStartE2EDuration="6.661547748s" podCreationTimestamp="2025-12-03 06:14:57 +0000 UTC" firstStartedPulling="2025-12-03 06:14:58.551048528 +0000 UTC m=+2022.486509369" lastFinishedPulling="2025-12-03 06:15:02.466188196 +0000 UTC m=+2026.401649077" observedRunningTime="2025-12-03 06:15:03.649579223 +0000 UTC m=+2027.585040104" watchObservedRunningTime="2025-12-03 06:15:03.661547748 +0000 UTC m=+2027.597008619" Dec 03 06:15:04 crc kubenswrapper[4810]: I1203 06:15:04.110319 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k"] Dec 03 06:15:04 crc kubenswrapper[4810]: I1203 06:15:04.117365 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412330-ltx4k"] Dec 03 06:15:04 crc kubenswrapper[4810]: I1203 06:15:04.392419 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03" path="/var/lib/kubelet/pods/b17e24b1-3f65-40b9-9dc3-dc6b7ab14a03/volumes" Dec 03 06:15:04 crc kubenswrapper[4810]: I1203 06:15:04.637686 4810 generic.go:334] "Generic (PLEG): container finished" podID="841fc3ce-d8a7-4cb2-89ab-31cae73ce18d" containerID="f631ef08821bd020e2816565fbb06df53dcfd74ebf87e7d8ef2938e02190eef1" exitCode=0 Dec 03 06:15:04 crc kubenswrapper[4810]: I1203 06:15:04.637792 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" 
event={"ID":"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d","Type":"ContainerDied","Data":"f631ef08821bd020e2816565fbb06df53dcfd74ebf87e7d8ef2938e02190eef1"} Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.197669 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.315635 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zqbg\" (UniqueName: \"kubernetes.io/projected/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-kube-api-access-8zqbg\") pod \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.315999 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-inventory\") pod \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.316131 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-ssh-key\") pod \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\" (UID: \"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d\") " Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.334105 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-kube-api-access-8zqbg" (OuterVolumeSpecName: "kube-api-access-8zqbg") pod "841fc3ce-d8a7-4cb2-89ab-31cae73ce18d" (UID: "841fc3ce-d8a7-4cb2-89ab-31cae73ce18d"). InnerVolumeSpecName "kube-api-access-8zqbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.370192 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "841fc3ce-d8a7-4cb2-89ab-31cae73ce18d" (UID: "841fc3ce-d8a7-4cb2-89ab-31cae73ce18d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.421305 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.421340 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zqbg\" (UniqueName: \"kubernetes.io/projected/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-kube-api-access-8zqbg\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.434566 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-inventory" (OuterVolumeSpecName: "inventory") pod "841fc3ce-d8a7-4cb2-89ab-31cae73ce18d" (UID: "841fc3ce-d8a7-4cb2-89ab-31cae73ce18d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.523400 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/841fc3ce-d8a7-4cb2-89ab-31cae73ce18d-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.660476 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" event={"ID":"841fc3ce-d8a7-4cb2-89ab-31cae73ce18d","Type":"ContainerDied","Data":"68914f1da8e9e5e5ab0f67323a8b16ccf7470ec02fecf7c12e37d96d737caa2c"} Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.660524 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68914f1da8e9e5e5ab0f67323a8b16ccf7470ec02fecf7c12e37d96d737caa2c" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.660549 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.763209 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp"] Dec 03 06:15:06 crc kubenswrapper[4810]: E1203 06:15:06.764151 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6705d396-c70c-4160-83b5-ded3ceef591e" containerName="collect-profiles" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.764171 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6705d396-c70c-4160-83b5-ded3ceef591e" containerName="collect-profiles" Dec 03 06:15:06 crc kubenswrapper[4810]: E1203 06:15:06.764195 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841fc3ce-d8a7-4cb2-89ab-31cae73ce18d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.764203 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="841fc3ce-d8a7-4cb2-89ab-31cae73ce18d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.764408 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="841fc3ce-d8a7-4cb2-89ab-31cae73ce18d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.764433 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6705d396-c70c-4160-83b5-ded3ceef591e" containerName="collect-profiles" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.765217 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.769526 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.769987 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.770117 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.770281 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.771013 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.771146 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.772212 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.779223 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.780783 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp"] Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830461 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830513 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830541 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830592 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830628 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830662 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830683 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830718 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830765 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830795 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830816 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: 
\"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830832 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830865 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5l6r\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-kube-api-access-j5l6r\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.830885 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.932655 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.933495 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.933526 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.933570 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.933611 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.933671 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.933692 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.933795 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.934066 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5l6r\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-kube-api-access-j5l6r\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.934098 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.934147 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.934170 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.934195 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.934239 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.937525 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.938334 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.940447 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.940601 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.941292 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.942518 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.942645 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.942826 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.943967 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.944382 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.944880 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.946604 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.949064 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:06 crc kubenswrapper[4810]: I1203 06:15:06.953070 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5l6r\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-kube-api-access-j5l6r\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:07 crc kubenswrapper[4810]: I1203 06:15:07.084347 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:07 crc kubenswrapper[4810]: I1203 06:15:07.442220 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp"] Dec 03 06:15:07 crc kubenswrapper[4810]: I1203 06:15:07.558169 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:15:07 crc kubenswrapper[4810]: I1203 06:15:07.558241 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:15:07 crc kubenswrapper[4810]: I1203 06:15:07.671911 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" event={"ID":"ee03c5ac-6b76-4852-b07f-b73140f037dd","Type":"ContainerStarted","Data":"1ea411f84eb981593922944b609f663b90aab3b3cc25c8f60e3eb1459a508b52"} Dec 03 06:15:08 crc kubenswrapper[4810]: I1203 06:15:08.612717 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5jk7q" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="registry-server" probeResult="failure" output=< Dec 03 06:15:08 crc kubenswrapper[4810]: timeout: failed to connect service ":50051" within 1s Dec 03 06:15:08 crc kubenswrapper[4810]: > Dec 03 06:15:08 crc kubenswrapper[4810]: I1203 06:15:08.686827 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" event={"ID":"ee03c5ac-6b76-4852-b07f-b73140f037dd","Type":"ContainerStarted","Data":"809ce8fc55897e69fab01c6f98b237012178e47a488fff81a34607cd1ab51c2f"} Dec 03 06:15:08 crc kubenswrapper[4810]: I1203 06:15:08.740121 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" podStartSLOduration=2.269166033 podStartE2EDuration="2.740101776s" podCreationTimestamp="2025-12-03 06:15:06 +0000 UTC" firstStartedPulling="2025-12-03 06:15:07.449690036 +0000 UTC m=+2031.385150867" lastFinishedPulling="2025-12-03 06:15:07.920625759 +0000 UTC m=+2031.856086610" observedRunningTime="2025-12-03 06:15:08.732363353 +0000 UTC m=+2032.667824214" watchObservedRunningTime="2025-12-03 06:15:08.740101776 +0000 UTC m=+2032.675562617" Dec 03 06:15:17 crc kubenswrapper[4810]: I1203 06:15:17.637160 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:15:17 crc kubenswrapper[4810]: I1203 06:15:17.724961 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:15:17 crc kubenswrapper[4810]: I1203 06:15:17.897649 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-5jk7q"] Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.110532 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5jk7q" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="registry-server" containerID="cri-o://d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433" gracePeriod=2 Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.636815 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.761143 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-utilities\") pod \"e3d521e8-5b73-492d-958b-f627f2391efc\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.761816 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-catalog-content\") pod \"e3d521e8-5b73-492d-958b-f627f2391efc\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.761968 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x642h\" (UniqueName: \"kubernetes.io/projected/e3d521e8-5b73-492d-958b-f627f2391efc-kube-api-access-x642h\") pod \"e3d521e8-5b73-492d-958b-f627f2391efc\" (UID: \"e3d521e8-5b73-492d-958b-f627f2391efc\") " Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.762602 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-utilities" (OuterVolumeSpecName: "utilities") pod "e3d521e8-5b73-492d-958b-f627f2391efc" (UID: "e3d521e8-5b73-492d-958b-f627f2391efc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.770055 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3d521e8-5b73-492d-958b-f627f2391efc-kube-api-access-x642h" (OuterVolumeSpecName: "kube-api-access-x642h") pod "e3d521e8-5b73-492d-958b-f627f2391efc" (UID: "e3d521e8-5b73-492d-958b-f627f2391efc"). InnerVolumeSpecName "kube-api-access-x642h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.864456 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.864500 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x642h\" (UniqueName: \"kubernetes.io/projected/e3d521e8-5b73-492d-958b-f627f2391efc-kube-api-access-x642h\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.885075 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3d521e8-5b73-492d-958b-f627f2391efc" (UID: "e3d521e8-5b73-492d-958b-f627f2391efc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:15:19 crc kubenswrapper[4810]: I1203 06:15:19.967589 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3d521e8-5b73-492d-958b-f627f2391efc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.126520 4810 generic.go:334] "Generic (PLEG): container finished" podID="e3d521e8-5b73-492d-958b-f627f2391efc" containerID="d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433" exitCode=0 Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.126591 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jk7q" event={"ID":"e3d521e8-5b73-492d-958b-f627f2391efc","Type":"ContainerDied","Data":"d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433"} Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.126616 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5jk7q" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.126650 4810 scope.go:117] "RemoveContainer" containerID="d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.126634 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jk7q" event={"ID":"e3d521e8-5b73-492d-958b-f627f2391efc","Type":"ContainerDied","Data":"a5b4c3b6323905c5c39c6c13f9484a6ed2fdc1b55b3781127b6846ab46323a81"} Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.160685 4810 scope.go:117] "RemoveContainer" containerID="b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.168250 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5jk7q"] Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.176607 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5jk7q"] Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.236654 4810 scope.go:117] "RemoveContainer" containerID="5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.277134 4810 scope.go:117] "RemoveContainer" containerID="d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433" Dec 03 06:15:20 crc kubenswrapper[4810]: E1203 06:15:20.277755 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433\": container with ID starting with d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433 not found: ID does not exist" containerID="d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.277806 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433"} err="failed to get container status \"d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433\": rpc error: code = NotFound desc = could not find container \"d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433\": container with ID starting with d664d54b7d2e27d8752708fd51fde7257a5b96dcb17f3853d243a3f93adbb433 not found: ID does not exist" Dec 03 06:15:20 crc 
kubenswrapper[4810]: I1203 06:15:20.277840 4810 scope.go:117] "RemoveContainer" containerID="b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6" Dec 03 06:15:20 crc kubenswrapper[4810]: E1203 06:15:20.278584 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6\": container with ID starting with b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6 not found: ID does not exist" containerID="b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.278650 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6"} err="failed to get container status \"b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6\": rpc error: code = NotFound desc = could not find container \"b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6\": container with ID starting with b628f04a6ed398b6449e9e3c38e155c337212562b160e4d3da8d36d0321b07a6 not found: ID does not exist" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.278695 4810 scope.go:117] "RemoveContainer" containerID="5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9" Dec 03 06:15:20 crc kubenswrapper[4810]: E1203 06:15:20.279385 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9\": container with ID starting with 5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9 not found: ID does not exist" containerID="5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.279425 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9"} err="failed to get container status \"5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9\": rpc error: code = NotFound desc = could not find container \"5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9\": container with ID starting with 5ce52efd0a49f137ae4b29c012233ce2d13703e8be74b3746b6b20670d7a4db9 not found: ID does not exist" Dec 03 06:15:20 crc kubenswrapper[4810]: I1203 06:15:20.391072 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" path="/var/lib/kubelet/pods/e3d521e8-5b73-492d-958b-f627f2391efc/volumes" Dec 03 06:15:25 crc kubenswrapper[4810]: I1203 06:15:25.677167 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:15:25 crc kubenswrapper[4810]: I1203 06:15:25.679922 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:15:29 crc kubenswrapper[4810]: I1203 06:15:29.136303 4810 scope.go:117] "RemoveContainer" 
containerID="8fc8dba3ad266dc8ccc256813c60a7d3dd765296d224de2842871c78d19ab012" Dec 03 06:15:54 crc kubenswrapper[4810]: I1203 06:15:54.545572 4810 generic.go:334] "Generic (PLEG): container finished" podID="ee03c5ac-6b76-4852-b07f-b73140f037dd" containerID="809ce8fc55897e69fab01c6f98b237012178e47a488fff81a34607cd1ab51c2f" exitCode=0 Dec 03 06:15:54 crc kubenswrapper[4810]: I1203 06:15:54.545706 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" event={"ID":"ee03c5ac-6b76-4852-b07f-b73140f037dd","Type":"ContainerDied","Data":"809ce8fc55897e69fab01c6f98b237012178e47a488fff81a34607cd1ab51c2f"} Dec 03 06:15:55 crc kubenswrapper[4810]: I1203 06:15:55.677499 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:15:55 crc kubenswrapper[4810]: I1203 06:15:55.677573 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.026561 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.048535 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-libvirt-combined-ca-bundle\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.048654 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-bootstrap-combined-ca-bundle\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.048720 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.048776 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ssh-key\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.048828 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-inventory\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 
06:15:56.048907 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-ovn-default-certs-0\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.048929 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.048962 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5l6r\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-kube-api-access-j5l6r\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.049014 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-neutron-metadata-combined-ca-bundle\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.049049 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-telemetry-combined-ca-bundle\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.049089 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ovn-combined-ca-bundle\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.049113 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-nova-combined-ca-bundle\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.049139 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.049157 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-repo-setup-combined-ca-bundle\") pod \"ee03c5ac-6b76-4852-b07f-b73140f037dd\" (UID: \"ee03c5ac-6b76-4852-b07f-b73140f037dd\") " Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.064093 4810 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.064174 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-kube-api-access-j5l6r" (OuterVolumeSpecName: "kube-api-access-j5l6r") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "kube-api-access-j5l6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.064314 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.064673 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.064789 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.072347 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.072418 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.072625 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.073563 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.073840 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.076097 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.079086 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.110760 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-inventory" (OuterVolumeSpecName: "inventory") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.112547 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ee03c5ac-6b76-4852-b07f-b73140f037dd" (UID: "ee03c5ac-6b76-4852-b07f-b73140f037dd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.151523 4810 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.151856 4810 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.151940 4810 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152016 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152101 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152174 4810 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152252 4810 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152333 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5l6r\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-kube-api-access-j5l6r\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152406 4810 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152531 4810 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152661 4810 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152779 4810 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-nova-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152858 4810 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ee03c5ac-6b76-4852-b07f-b73140f037dd-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.152932 4810 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee03c5ac-6b76-4852-b07f-b73140f037dd-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.569948 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" event={"ID":"ee03c5ac-6b76-4852-b07f-b73140f037dd","Type":"ContainerDied","Data":"1ea411f84eb981593922944b609f663b90aab3b3cc25c8f60e3eb1459a508b52"} Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.570003 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ea411f84eb981593922944b609f663b90aab3b3cc25c8f60e3eb1459a508b52" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.570019 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.696805 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v"] Dec 03 06:15:56 crc kubenswrapper[4810]: E1203 06:15:56.697479 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee03c5ac-6b76-4852-b07f-b73140f037dd" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.697493 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee03c5ac-6b76-4852-b07f-b73140f037dd" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 06:15:56 crc kubenswrapper[4810]: E1203 06:15:56.697509 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="extract-utilities" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.697515 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="extract-utilities" Dec 03 06:15:56 crc kubenswrapper[4810]: E1203 06:15:56.697529 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="registry-server" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.697534 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="registry-server" Dec 03 06:15:56 crc kubenswrapper[4810]: E1203 06:15:56.697568 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="extract-content" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.697580 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="extract-content" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.697825 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3d521e8-5b73-492d-958b-f627f2391efc" containerName="registry-server" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.697840 4810 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ee03c5ac-6b76-4852-b07f-b73140f037dd" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.698465 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.701802 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.701900 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.702023 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.702982 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.704134 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.714018 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v"] Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.783404 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shq7g\" (UniqueName: \"kubernetes.io/projected/ba7b0eac-f456-4a3d-b96a-b44fc348d317-kube-api-access-shq7g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.783487 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.783531 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.783775 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.783846 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: 
\"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.886958 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.887065 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.887134 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.887185 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.887375 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shq7g\" (UniqueName: \"kubernetes.io/projected/ba7b0eac-f456-4a3d-b96a-b44fc348d317-kube-api-access-shq7g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.888246 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.895723 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.896345 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.900430 
4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:56 crc kubenswrapper[4810]: I1203 06:15:56.923123 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shq7g\" (UniqueName: \"kubernetes.io/projected/ba7b0eac-f456-4a3d-b96a-b44fc348d317-kube-api-access-shq7g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wz77v\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:57 crc kubenswrapper[4810]: I1203 06:15:57.025633 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:15:57 crc kubenswrapper[4810]: I1203 06:15:57.734215 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v"] Dec 03 06:15:58 crc kubenswrapper[4810]: I1203 06:15:58.602467 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" event={"ID":"ba7b0eac-f456-4a3d-b96a-b44fc348d317","Type":"ContainerStarted","Data":"0091fddf179a58ae0099ce28b4e2a2ff3c7986ed258e810df9a2c101183ea6f5"} Dec 03 06:15:58 crc kubenswrapper[4810]: I1203 06:15:58.602949 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" event={"ID":"ba7b0eac-f456-4a3d-b96a-b44fc348d317","Type":"ContainerStarted","Data":"2d262cb4c12ba1a9a42828137107019362b293d61d81e705063702c1bc0d0911"} Dec 03 06:15:58 crc kubenswrapper[4810]: I1203 06:15:58.647468 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" podStartSLOduration=2.137230797 podStartE2EDuration="2.647430124s" podCreationTimestamp="2025-12-03 06:15:56 +0000 UTC" firstStartedPulling="2025-12-03 06:15:57.753049518 +0000 UTC m=+2081.688510359" lastFinishedPulling="2025-12-03 06:15:58.263248815 +0000 UTC m=+2082.198709686" observedRunningTime="2025-12-03 06:15:58.633400763 +0000 UTC m=+2082.568861644" watchObservedRunningTime="2025-12-03 06:15:58.647430124 +0000 UTC m=+2082.582891015" Dec 03 06:16:25 crc kubenswrapper[4810]: I1203 06:16:25.677563 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:16:25 crc kubenswrapper[4810]: I1203 06:16:25.678683 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:16:25 crc kubenswrapper[4810]: I1203 06:16:25.678786 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:16:25 crc kubenswrapper[4810]: I1203 06:16:25.680051 4810 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f0c5fd3db41f9ed0ec1cfbdb47ba058ffd18f513197cccd37a5a7bb10b79784c"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:16:25 crc kubenswrapper[4810]: I1203 06:16:25.680153 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://f0c5fd3db41f9ed0ec1cfbdb47ba058ffd18f513197cccd37a5a7bb10b79784c" gracePeriod=600 Dec 03 06:16:25 crc kubenswrapper[4810]: I1203 06:16:25.953994 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="f0c5fd3db41f9ed0ec1cfbdb47ba058ffd18f513197cccd37a5a7bb10b79784c" exitCode=0 Dec 03 06:16:25 crc kubenswrapper[4810]: I1203 06:16:25.954072 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"f0c5fd3db41f9ed0ec1cfbdb47ba058ffd18f513197cccd37a5a7bb10b79784c"} Dec 03 06:16:25 crc kubenswrapper[4810]: I1203 06:16:25.954415 4810 scope.go:117] "RemoveContainer" containerID="91a08e659f8d9581dcb0bb4c9e44e911c70fd9b8922f0251ec1d6c43bc119299" Dec 03 06:16:26 crc kubenswrapper[4810]: I1203 06:16:26.970655 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb"} Dec 03 06:17:13 crc kubenswrapper[4810]: I1203 06:17:13.531023 4810 generic.go:334] "Generic (PLEG): container finished" podID="ba7b0eac-f456-4a3d-b96a-b44fc348d317" containerID="0091fddf179a58ae0099ce28b4e2a2ff3c7986ed258e810df9a2c101183ea6f5" exitCode=0 Dec 03 06:17:13 crc kubenswrapper[4810]: I1203 06:17:13.531119 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" event={"ID":"ba7b0eac-f456-4a3d-b96a-b44fc348d317","Type":"ContainerDied","Data":"0091fddf179a58ae0099ce28b4e2a2ff3c7986ed258e810df9a2c101183ea6f5"} Dec 03 06:17:14 crc kubenswrapper[4810]: I1203 06:17:14.835805 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nqkbx"] Dec 03 06:17:14 crc kubenswrapper[4810]: I1203 06:17:14.838689 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:14 crc kubenswrapper[4810]: I1203 06:17:14.846017 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nqkbx"] Dec 03 06:17:14 crc kubenswrapper[4810]: I1203 06:17:14.975504 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5bwj\" (UniqueName: \"kubernetes.io/projected/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-kube-api-access-x5bwj\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:14 crc kubenswrapper[4810]: I1203 06:17:14.975627 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-utilities\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:14 crc kubenswrapper[4810]: I1203 06:17:14.975713 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-catalog-content\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.078101 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-catalog-content\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.078242 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5bwj\" (UniqueName: \"kubernetes.io/projected/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-kube-api-access-x5bwj\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.078328 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-utilities\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.078969 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-utilities\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.079275 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-catalog-content\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.100392 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x5bwj\" (UniqueName: \"kubernetes.io/projected/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-kube-api-access-x5bwj\") pod \"certified-operators-nqkbx\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.158333 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.169299 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.281863 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovncontroller-config-0\") pod \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.282390 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shq7g\" (UniqueName: \"kubernetes.io/projected/ba7b0eac-f456-4a3d-b96a-b44fc348d317-kube-api-access-shq7g\") pod \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.282422 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovn-combined-ca-bundle\") pod \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.282503 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ssh-key\") pod \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.282558 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-inventory\") pod \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\" (UID: \"ba7b0eac-f456-4a3d-b96a-b44fc348d317\") " Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.291396 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ba7b0eac-f456-4a3d-b96a-b44fc348d317" (UID: "ba7b0eac-f456-4a3d-b96a-b44fc348d317"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.299644 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba7b0eac-f456-4a3d-b96a-b44fc348d317-kube-api-access-shq7g" (OuterVolumeSpecName: "kube-api-access-shq7g") pod "ba7b0eac-f456-4a3d-b96a-b44fc348d317" (UID: "ba7b0eac-f456-4a3d-b96a-b44fc348d317"). InnerVolumeSpecName "kube-api-access-shq7g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.327899 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ba7b0eac-f456-4a3d-b96a-b44fc348d317" (UID: "ba7b0eac-f456-4a3d-b96a-b44fc348d317"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.333421 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "ba7b0eac-f456-4a3d-b96a-b44fc348d317" (UID: "ba7b0eac-f456-4a3d-b96a-b44fc348d317"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.346020 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-inventory" (OuterVolumeSpecName: "inventory") pod "ba7b0eac-f456-4a3d-b96a-b44fc348d317" (UID: "ba7b0eac-f456-4a3d-b96a-b44fc348d317"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.392703 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shq7g\" (UniqueName: \"kubernetes.io/projected/ba7b0eac-f456-4a3d-b96a-b44fc348d317-kube-api-access-shq7g\") on node \"crc\" DevicePath \"\"" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.392817 4810 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.392839 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.392850 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba7b0eac-f456-4a3d-b96a-b44fc348d317-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.392860 4810 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ba7b0eac-f456-4a3d-b96a-b44fc348d317-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.573138 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" event={"ID":"ba7b0eac-f456-4a3d-b96a-b44fc348d317","Type":"ContainerDied","Data":"2d262cb4c12ba1a9a42828137107019362b293d61d81e705063702c1bc0d0911"} Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.573182 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d262cb4c12ba1a9a42828137107019362b293d61d81e705063702c1bc0d0911" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.573261 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wz77v" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.663854 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl"] Dec 03 06:17:15 crc kubenswrapper[4810]: E1203 06:17:15.664293 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba7b0eac-f456-4a3d-b96a-b44fc348d317" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.664308 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba7b0eac-f456-4a3d-b96a-b44fc348d317" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.664520 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba7b0eac-f456-4a3d-b96a-b44fc348d317" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.665245 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.668063 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.668619 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.668863 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.669033 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.669178 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.672021 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl"] Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.672091 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.749525 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nqkbx"] Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.802403 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.802467 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: 
\"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.802588 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.802641 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbn5d\" (UniqueName: \"kubernetes.io/projected/201a4b8d-5ecb-4cc4-bacb-51d499efb485-kube-api-access-lbn5d\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.802661 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.802804 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.905041 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbn5d\" (UniqueName: \"kubernetes.io/projected/201a4b8d-5ecb-4cc4-bacb-51d499efb485-kube-api-access-lbn5d\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.906237 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.906452 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.906553 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.909264 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.909871 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.912179 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.913123 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.915240 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.916778 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.919846 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.924703 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbn5d\" (UniqueName: \"kubernetes.io/projected/201a4b8d-5ecb-4cc4-bacb-51d499efb485-kube-api-access-lbn5d\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:15 crc kubenswrapper[4810]: I1203 06:17:15.997121 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:17:16 crc kubenswrapper[4810]: I1203 06:17:16.589325 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl"] Dec 03 06:17:16 crc kubenswrapper[4810]: W1203 06:17:16.594506 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod201a4b8d_5ecb_4cc4_bacb_51d499efb485.slice/crio-20621a58f5212fd79ac4ae7e4716f27d36448af797132326bf0df1c263bb061d WatchSource:0}: Error finding container 20621a58f5212fd79ac4ae7e4716f27d36448af797132326bf0df1c263bb061d: Status 404 returned error can't find the container with id 20621a58f5212fd79ac4ae7e4716f27d36448af797132326bf0df1c263bb061d Dec 03 06:17:16 crc kubenswrapper[4810]: I1203 06:17:16.601476 4810 generic.go:334] "Generic (PLEG): container finished" podID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerID="ba9d9f6f82ef4bf6a565fb450da4fb3d70ba015c40d040e406d9379c6a33a0aa" exitCode=0 Dec 03 06:17:16 crc kubenswrapper[4810]: I1203 06:17:16.601520 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqkbx" event={"ID":"f0044677-1cd8-4db8-8584-9b5a2b6e93cd","Type":"ContainerDied","Data":"ba9d9f6f82ef4bf6a565fb450da4fb3d70ba015c40d040e406d9379c6a33a0aa"} Dec 03 06:17:16 crc kubenswrapper[4810]: I1203 06:17:16.601551 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqkbx" event={"ID":"f0044677-1cd8-4db8-8584-9b5a2b6e93cd","Type":"ContainerStarted","Data":"7a70c3bf5efba0a786378624b8840a882ef57080436bf7d64d068e6d84ca992b"} Dec 03 06:17:17 crc kubenswrapper[4810]: I1203 06:17:17.070432 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:17:17 crc kubenswrapper[4810]: I1203 06:17:17.614208 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqkbx" event={"ID":"f0044677-1cd8-4db8-8584-9b5a2b6e93cd","Type":"ContainerStarted","Data":"d308c9f2d21d0f053ed8cb4ff142a803a60018624d4df740ef1127ffd38c7039"} Dec 03 06:17:17 crc kubenswrapper[4810]: I1203 06:17:17.616643 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" event={"ID":"201a4b8d-5ecb-4cc4-bacb-51d499efb485","Type":"ContainerStarted","Data":"b5f6bf758994ea254028b0daa887b403864f01c74fd4772f4aa68db43a6178f7"} Dec 03 06:17:17 crc kubenswrapper[4810]: I1203 06:17:17.616697 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" 
event={"ID":"201a4b8d-5ecb-4cc4-bacb-51d499efb485","Type":"ContainerStarted","Data":"20621a58f5212fd79ac4ae7e4716f27d36448af797132326bf0df1c263bb061d"} Dec 03 06:17:17 crc kubenswrapper[4810]: I1203 06:17:17.672674 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" podStartSLOduration=2.2066959 podStartE2EDuration="2.672655135s" podCreationTimestamp="2025-12-03 06:17:15 +0000 UTC" firstStartedPulling="2025-12-03 06:17:16.600812801 +0000 UTC m=+2160.536273652" lastFinishedPulling="2025-12-03 06:17:17.066772036 +0000 UTC m=+2161.002232887" observedRunningTime="2025-12-03 06:17:17.654533505 +0000 UTC m=+2161.589994376" watchObservedRunningTime="2025-12-03 06:17:17.672655135 +0000 UTC m=+2161.608115966" Dec 03 06:17:18 crc kubenswrapper[4810]: I1203 06:17:18.634116 4810 generic.go:334] "Generic (PLEG): container finished" podID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerID="d308c9f2d21d0f053ed8cb4ff142a803a60018624d4df740ef1127ffd38c7039" exitCode=0 Dec 03 06:17:18 crc kubenswrapper[4810]: I1203 06:17:18.634250 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqkbx" event={"ID":"f0044677-1cd8-4db8-8584-9b5a2b6e93cd","Type":"ContainerDied","Data":"d308c9f2d21d0f053ed8cb4ff142a803a60018624d4df740ef1127ffd38c7039"} Dec 03 06:17:19 crc kubenswrapper[4810]: I1203 06:17:19.652395 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqkbx" event={"ID":"f0044677-1cd8-4db8-8584-9b5a2b6e93cd","Type":"ContainerStarted","Data":"2a10c70294d6d0fdb0dfc5077c85ca4ee61752fea30aad9be58c7a98c358ae53"} Dec 03 06:17:19 crc kubenswrapper[4810]: I1203 06:17:19.692335 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nqkbx" podStartSLOduration=3.257509286 podStartE2EDuration="5.692298831s" podCreationTimestamp="2025-12-03 06:17:14 +0000 UTC" firstStartedPulling="2025-12-03 06:17:16.604839738 +0000 UTC m=+2160.540300589" lastFinishedPulling="2025-12-03 06:17:19.039629253 +0000 UTC m=+2162.975090134" observedRunningTime="2025-12-03 06:17:19.68355085 +0000 UTC m=+2163.619011761" watchObservedRunningTime="2025-12-03 06:17:19.692298831 +0000 UTC m=+2163.627759712" Dec 03 06:17:25 crc kubenswrapper[4810]: I1203 06:17:25.170304 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:25 crc kubenswrapper[4810]: I1203 06:17:25.171463 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:25 crc kubenswrapper[4810]: I1203 06:17:25.269017 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:25 crc kubenswrapper[4810]: I1203 06:17:25.826671 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:25 crc kubenswrapper[4810]: I1203 06:17:25.901947 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nqkbx"] Dec 03 06:17:27 crc kubenswrapper[4810]: I1203 06:17:27.779585 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nqkbx" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerName="registry-server" 
containerID="cri-o://2a10c70294d6d0fdb0dfc5077c85ca4ee61752fea30aad9be58c7a98c358ae53" gracePeriod=2 Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.794033 4810 generic.go:334] "Generic (PLEG): container finished" podID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerID="2a10c70294d6d0fdb0dfc5077c85ca4ee61752fea30aad9be58c7a98c358ae53" exitCode=0 Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.794548 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqkbx" event={"ID":"f0044677-1cd8-4db8-8584-9b5a2b6e93cd","Type":"ContainerDied","Data":"2a10c70294d6d0fdb0dfc5077c85ca4ee61752fea30aad9be58c7a98c358ae53"} Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.794600 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqkbx" event={"ID":"f0044677-1cd8-4db8-8584-9b5a2b6e93cd","Type":"ContainerDied","Data":"7a70c3bf5efba0a786378624b8840a882ef57080436bf7d64d068e6d84ca992b"} Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.794617 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a70c3bf5efba0a786378624b8840a882ef57080436bf7d64d068e6d84ca992b" Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.839127 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.940927 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-catalog-content\") pod \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.941071 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5bwj\" (UniqueName: \"kubernetes.io/projected/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-kube-api-access-x5bwj\") pod \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.941164 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-utilities\") pod \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\" (UID: \"f0044677-1cd8-4db8-8584-9b5a2b6e93cd\") " Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.942878 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-utilities" (OuterVolumeSpecName: "utilities") pod "f0044677-1cd8-4db8-8584-9b5a2b6e93cd" (UID: "f0044677-1cd8-4db8-8584-9b5a2b6e93cd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:17:28 crc kubenswrapper[4810]: I1203 06:17:28.949074 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-kube-api-access-x5bwj" (OuterVolumeSpecName: "kube-api-access-x5bwj") pod "f0044677-1cd8-4db8-8584-9b5a2b6e93cd" (UID: "f0044677-1cd8-4db8-8584-9b5a2b6e93cd"). InnerVolumeSpecName "kube-api-access-x5bwj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:17:29 crc kubenswrapper[4810]: I1203 06:17:29.005120 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0044677-1cd8-4db8-8584-9b5a2b6e93cd" (UID: "f0044677-1cd8-4db8-8584-9b5a2b6e93cd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:17:29 crc kubenswrapper[4810]: I1203 06:17:29.043610 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5bwj\" (UniqueName: \"kubernetes.io/projected/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-kube-api-access-x5bwj\") on node \"crc\" DevicePath \"\"" Dec 03 06:17:29 crc kubenswrapper[4810]: I1203 06:17:29.043653 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:17:29 crc kubenswrapper[4810]: I1203 06:17:29.043667 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0044677-1cd8-4db8-8584-9b5a2b6e93cd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:17:29 crc kubenswrapper[4810]: I1203 06:17:29.806644 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nqkbx" Dec 03 06:17:29 crc kubenswrapper[4810]: I1203 06:17:29.858697 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nqkbx"] Dec 03 06:17:29 crc kubenswrapper[4810]: I1203 06:17:29.870664 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nqkbx"] Dec 03 06:17:30 crc kubenswrapper[4810]: I1203 06:17:30.396346 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" path="/var/lib/kubelet/pods/f0044677-1cd8-4db8-8584-9b5a2b6e93cd/volumes" Dec 03 06:18:12 crc kubenswrapper[4810]: I1203 06:18:12.371022 4810 generic.go:334] "Generic (PLEG): container finished" podID="201a4b8d-5ecb-4cc4-bacb-51d499efb485" containerID="b5f6bf758994ea254028b0daa887b403864f01c74fd4772f4aa68db43a6178f7" exitCode=0 Dec 03 06:18:12 crc kubenswrapper[4810]: I1203 06:18:12.371087 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" event={"ID":"201a4b8d-5ecb-4cc4-bacb-51d499efb485","Type":"ContainerDied","Data":"b5f6bf758994ea254028b0daa887b403864f01c74fd4772f4aa68db43a6178f7"} Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.051514 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.105961 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-nova-metadata-neutron-config-0\") pod \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.106637 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-inventory\") pod \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.106695 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbn5d\" (UniqueName: \"kubernetes.io/projected/201a4b8d-5ecb-4cc4-bacb-51d499efb485-kube-api-access-lbn5d\") pod \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.107988 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-metadata-combined-ca-bundle\") pod \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.108198 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-ovn-metadata-agent-neutron-config-0\") pod \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.109004 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-ssh-key\") pod \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\" (UID: \"201a4b8d-5ecb-4cc4-bacb-51d499efb485\") " Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.114028 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/201a4b8d-5ecb-4cc4-bacb-51d499efb485-kube-api-access-lbn5d" (OuterVolumeSpecName: "kube-api-access-lbn5d") pod "201a4b8d-5ecb-4cc4-bacb-51d499efb485" (UID: "201a4b8d-5ecb-4cc4-bacb-51d499efb485"). InnerVolumeSpecName "kube-api-access-lbn5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.116499 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "201a4b8d-5ecb-4cc4-bacb-51d499efb485" (UID: "201a4b8d-5ecb-4cc4-bacb-51d499efb485"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.153206 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "201a4b8d-5ecb-4cc4-bacb-51d499efb485" (UID: "201a4b8d-5ecb-4cc4-bacb-51d499efb485"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.153823 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "201a4b8d-5ecb-4cc4-bacb-51d499efb485" (UID: "201a4b8d-5ecb-4cc4-bacb-51d499efb485"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.154166 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "201a4b8d-5ecb-4cc4-bacb-51d499efb485" (UID: "201a4b8d-5ecb-4cc4-bacb-51d499efb485"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.162543 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-inventory" (OuterVolumeSpecName: "inventory") pod "201a4b8d-5ecb-4cc4-bacb-51d499efb485" (UID: "201a4b8d-5ecb-4cc4-bacb-51d499efb485"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.211897 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.211942 4810 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.211959 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.211972 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbn5d\" (UniqueName: \"kubernetes.io/projected/201a4b8d-5ecb-4cc4-bacb-51d499efb485-kube-api-access-lbn5d\") on node \"crc\" DevicePath \"\"" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.211986 4810 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.211999 4810 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/201a4b8d-5ecb-4cc4-bacb-51d499efb485-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.399449 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.406570 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl" event={"ID":"201a4b8d-5ecb-4cc4-bacb-51d499efb485","Type":"ContainerDied","Data":"20621a58f5212fd79ac4ae7e4716f27d36448af797132326bf0df1c263bb061d"} Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.406644 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20621a58f5212fd79ac4ae7e4716f27d36448af797132326bf0df1c263bb061d" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.541622 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt"] Dec 03 06:18:14 crc kubenswrapper[4810]: E1203 06:18:14.542228 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerName="extract-utilities" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.542261 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerName="extract-utilities" Dec 03 06:18:14 crc kubenswrapper[4810]: E1203 06:18:14.542294 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerName="registry-server" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.542305 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerName="registry-server" Dec 03 06:18:14 crc kubenswrapper[4810]: E1203 06:18:14.542337 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerName="extract-content" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.542348 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerName="extract-content" Dec 03 06:18:14 crc kubenswrapper[4810]: E1203 06:18:14.542398 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201a4b8d-5ecb-4cc4-bacb-51d499efb485" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.542414 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="201a4b8d-5ecb-4cc4-bacb-51d499efb485" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.542687 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0044677-1cd8-4db8-8584-9b5a2b6e93cd" containerName="registry-server" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.542763 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="201a4b8d-5ecb-4cc4-bacb-51d499efb485" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.543566 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.546170 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.546258 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.547668 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.547680 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.551011 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.563140 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt"] Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.621372 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.621466 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twl8k\" (UniqueName: \"kubernetes.io/projected/d19e7058-371b-4ac9-811a-949bc24e8b03-kube-api-access-twl8k\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.621498 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.621543 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.621658 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.723575 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.723794 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.723871 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.723935 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twl8k\" (UniqueName: \"kubernetes.io/projected/d19e7058-371b-4ac9-811a-949bc24e8b03-kube-api-access-twl8k\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.723962 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.727712 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.728124 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.729687 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.729705 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.742619 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twl8k\" (UniqueName: \"kubernetes.io/projected/d19e7058-371b-4ac9-811a-949bc24e8b03-kube-api-access-twl8k\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:14 crc kubenswrapper[4810]: I1203 06:18:14.887933 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:18:15 crc kubenswrapper[4810]: I1203 06:18:15.292508 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt"] Dec 03 06:18:15 crc kubenswrapper[4810]: I1203 06:18:15.412056 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" event={"ID":"d19e7058-371b-4ac9-811a-949bc24e8b03","Type":"ContainerStarted","Data":"4a536816e8d881db20794ee5fb143d239350ffebbf510e9e245f5c01346d5473"} Dec 03 06:18:16 crc kubenswrapper[4810]: I1203 06:18:16.425391 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" event={"ID":"d19e7058-371b-4ac9-811a-949bc24e8b03","Type":"ContainerStarted","Data":"b179652359780c58aa417938621ba8b1260b9766410d6c5c11237b4f240c3811"} Dec 03 06:18:16 crc kubenswrapper[4810]: I1203 06:18:16.459310 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" podStartSLOduration=2.002058378 podStartE2EDuration="2.459281303s" podCreationTimestamp="2025-12-03 06:18:14 +0000 UTC" firstStartedPulling="2025-12-03 06:18:15.301675707 +0000 UTC m=+2219.237136548" lastFinishedPulling="2025-12-03 06:18:15.758898592 +0000 UTC m=+2219.694359473" observedRunningTime="2025-12-03 06:18:16.446967267 +0000 UTC m=+2220.382428188" watchObservedRunningTime="2025-12-03 06:18:16.459281303 +0000 UTC m=+2220.394742174" Dec 03 06:18:55 crc kubenswrapper[4810]: I1203 06:18:55.677540 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:18:55 crc kubenswrapper[4810]: I1203 06:18:55.678339 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:19:25 crc kubenswrapper[4810]: I1203 06:19:25.677225 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:19:25 crc kubenswrapper[4810]: I1203 06:19:25.678107 4810 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:19:55 crc kubenswrapper[4810]: I1203 06:19:55.678074 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:19:55 crc kubenswrapper[4810]: I1203 06:19:55.678663 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:19:55 crc kubenswrapper[4810]: I1203 06:19:55.678718 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:19:55 crc kubenswrapper[4810]: I1203 06:19:55.679625 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:19:55 crc kubenswrapper[4810]: I1203 06:19:55.679685 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" gracePeriod=600 Dec 03 06:19:55 crc kubenswrapper[4810]: E1203 06:19:55.828492 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:19:56 crc kubenswrapper[4810]: I1203 06:19:56.724314 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" exitCode=0 Dec 03 06:19:56 crc kubenswrapper[4810]: I1203 06:19:56.724416 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb"} Dec 03 06:19:56 crc kubenswrapper[4810]: I1203 06:19:56.724760 4810 scope.go:117] "RemoveContainer" containerID="f0c5fd3db41f9ed0ec1cfbdb47ba058ffd18f513197cccd37a5a7bb10b79784c" Dec 03 06:19:56 crc kubenswrapper[4810]: I1203 06:19:56.725549 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:19:56 crc kubenswrapper[4810]: E1203 
06:19:56.725914 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:20:08 crc kubenswrapper[4810]: I1203 06:20:08.377604 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:20:08 crc kubenswrapper[4810]: E1203 06:20:08.378632 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:20:23 crc kubenswrapper[4810]: I1203 06:20:23.377795 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:20:23 crc kubenswrapper[4810]: E1203 06:20:23.378989 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.454015 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5zgx2"] Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.460055 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.477074 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5zgx2"] Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.557533 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-catalog-content\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.557604 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-utilities\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.557660 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctttq\" (UniqueName: \"kubernetes.io/projected/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-kube-api-access-ctttq\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.659309 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctttq\" (UniqueName: \"kubernetes.io/projected/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-kube-api-access-ctttq\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.659533 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-catalog-content\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.659573 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-utilities\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.660113 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-utilities\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.660665 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-catalog-content\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.681235 4810 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ctttq\" (UniqueName: \"kubernetes.io/projected/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-kube-api-access-ctttq\") pod \"redhat-marketplace-5zgx2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:31 crc kubenswrapper[4810]: I1203 06:20:31.796305 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:32 crc kubenswrapper[4810]: I1203 06:20:32.397262 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5zgx2"] Dec 03 06:20:33 crc kubenswrapper[4810]: I1203 06:20:33.176138 4810 generic.go:334] "Generic (PLEG): container finished" podID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerID="eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b" exitCode=0 Dec 03 06:20:33 crc kubenswrapper[4810]: I1203 06:20:33.176241 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5zgx2" event={"ID":"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2","Type":"ContainerDied","Data":"eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b"} Dec 03 06:20:33 crc kubenswrapper[4810]: I1203 06:20:33.176594 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5zgx2" event={"ID":"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2","Type":"ContainerStarted","Data":"a98a8b70d87925aa412f82461dace68b76adc347c7d57c826d8ece292c56d76e"} Dec 03 06:20:33 crc kubenswrapper[4810]: I1203 06:20:33.180612 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:20:34 crc kubenswrapper[4810]: I1203 06:20:34.192039 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5zgx2" event={"ID":"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2","Type":"ContainerStarted","Data":"67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a"} Dec 03 06:20:35 crc kubenswrapper[4810]: I1203 06:20:35.206588 4810 generic.go:334] "Generic (PLEG): container finished" podID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerID="67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a" exitCode=0 Dec 03 06:20:35 crc kubenswrapper[4810]: I1203 06:20:35.206656 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5zgx2" event={"ID":"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2","Type":"ContainerDied","Data":"67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a"} Dec 03 06:20:36 crc kubenswrapper[4810]: I1203 06:20:36.221314 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5zgx2" event={"ID":"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2","Type":"ContainerStarted","Data":"6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803"} Dec 03 06:20:36 crc kubenswrapper[4810]: I1203 06:20:36.247542 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5zgx2" podStartSLOduration=2.7587220329999997 podStartE2EDuration="5.247513672s" podCreationTimestamp="2025-12-03 06:20:31 +0000 UTC" firstStartedPulling="2025-12-03 06:20:33.180208572 +0000 UTC m=+2357.115669453" lastFinishedPulling="2025-12-03 06:20:35.669000241 +0000 UTC m=+2359.604461092" observedRunningTime="2025-12-03 06:20:36.24628526 +0000 UTC m=+2360.181746111" watchObservedRunningTime="2025-12-03 06:20:36.247513672 +0000 UTC 
m=+2360.182974523" Dec 03 06:20:38 crc kubenswrapper[4810]: I1203 06:20:38.379695 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:20:38 crc kubenswrapper[4810]: E1203 06:20:38.380318 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:20:41 crc kubenswrapper[4810]: I1203 06:20:41.798850 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:41 crc kubenswrapper[4810]: I1203 06:20:41.799417 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:41 crc kubenswrapper[4810]: I1203 06:20:41.881423 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:42 crc kubenswrapper[4810]: I1203 06:20:42.401533 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:42 crc kubenswrapper[4810]: I1203 06:20:42.479695 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5zgx2"] Dec 03 06:20:44 crc kubenswrapper[4810]: I1203 06:20:44.338178 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5zgx2" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerName="registry-server" containerID="cri-o://6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803" gracePeriod=2 Dec 03 06:20:44 crc kubenswrapper[4810]: I1203 06:20:44.864039 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:44 crc kubenswrapper[4810]: I1203 06:20:44.961083 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctttq\" (UniqueName: \"kubernetes.io/projected/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-kube-api-access-ctttq\") pod \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " Dec 03 06:20:44 crc kubenswrapper[4810]: I1203 06:20:44.961686 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-catalog-content\") pod \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " Dec 03 06:20:44 crc kubenswrapper[4810]: I1203 06:20:44.961859 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-utilities\") pod \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\" (UID: \"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2\") " Dec 03 06:20:44 crc kubenswrapper[4810]: I1203 06:20:44.963423 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-utilities" (OuterVolumeSpecName: "utilities") pod "f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" (UID: "f85c19b4-65d4-492a-b0fb-e31c6a6abfb2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:20:44 crc kubenswrapper[4810]: I1203 06:20:44.972013 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-kube-api-access-ctttq" (OuterVolumeSpecName: "kube-api-access-ctttq") pod "f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" (UID: "f85c19b4-65d4-492a-b0fb-e31c6a6abfb2"). InnerVolumeSpecName "kube-api-access-ctttq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:20:44 crc kubenswrapper[4810]: I1203 06:20:44.986612 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" (UID: "f85c19b4-65d4-492a-b0fb-e31c6a6abfb2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.065397 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.065450 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctttq\" (UniqueName: \"kubernetes.io/projected/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-kube-api-access-ctttq\") on node \"crc\" DevicePath \"\"" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.065472 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.356368 4810 generic.go:334] "Generic (PLEG): container finished" podID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerID="6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803" exitCode=0 Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.356457 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5zgx2" event={"ID":"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2","Type":"ContainerDied","Data":"6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803"} Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.356522 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5zgx2" event={"ID":"f85c19b4-65d4-492a-b0fb-e31c6a6abfb2","Type":"ContainerDied","Data":"a98a8b70d87925aa412f82461dace68b76adc347c7d57c826d8ece292c56d76e"} Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.356579 4810 scope.go:117] "RemoveContainer" containerID="6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.357931 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5zgx2" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.395555 4810 scope.go:117] "RemoveContainer" containerID="67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.440017 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5zgx2"] Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.444863 4810 scope.go:117] "RemoveContainer" containerID="eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.450165 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5zgx2"] Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.484687 4810 scope.go:117] "RemoveContainer" containerID="6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803" Dec 03 06:20:45 crc kubenswrapper[4810]: E1203 06:20:45.485332 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803\": container with ID starting with 6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803 not found: ID does not exist" containerID="6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.485391 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803"} err="failed to get container status \"6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803\": rpc error: code = NotFound desc = could not find container \"6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803\": container with ID starting with 6ca32ea48f2a7f4441fe34d3fccdb339fbd7b404769736b9da4a44f65cf74803 not found: ID does not exist" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.485425 4810 scope.go:117] "RemoveContainer" containerID="67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a" Dec 03 06:20:45 crc kubenswrapper[4810]: E1203 06:20:45.485762 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a\": container with ID starting with 67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a not found: ID does not exist" containerID="67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.485790 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a"} err="failed to get container status \"67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a\": rpc error: code = NotFound desc = could not find container \"67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a\": container with ID starting with 67c7d0968dcdad0d74cda5fe8379f1c58eb82202ee9fe4679f7aca705cede33a not found: ID does not exist" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.485807 4810 scope.go:117] "RemoveContainer" containerID="eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b" Dec 03 06:20:45 crc kubenswrapper[4810]: E1203 06:20:45.489032 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b\": container with ID starting with eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b not found: ID does not exist" containerID="eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b" Dec 03 06:20:45 crc kubenswrapper[4810]: I1203 06:20:45.489086 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b"} err="failed to get container status \"eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b\": rpc error: code = NotFound desc = could not find container \"eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b\": container with ID starting with eafe136c4583d4a79fc8ca6cde00121aee4d18a986977a2fb00fd4f910f1a06b not found: ID does not exist" Dec 03 06:20:46 crc kubenswrapper[4810]: I1203 06:20:46.389523 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" path="/var/lib/kubelet/pods/f85c19b4-65d4-492a-b0fb-e31c6a6abfb2/volumes" Dec 03 06:20:49 crc kubenswrapper[4810]: I1203 06:20:49.378043 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:20:49 crc kubenswrapper[4810]: E1203 06:20:49.378800 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:21:04 crc kubenswrapper[4810]: I1203 06:21:04.378214 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:21:04 crc kubenswrapper[4810]: E1203 06:21:04.381392 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:21:19 crc kubenswrapper[4810]: I1203 06:21:19.377965 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:21:19 crc kubenswrapper[4810]: E1203 06:21:19.378721 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:21:34 crc kubenswrapper[4810]: I1203 06:21:34.378134 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:21:34 crc kubenswrapper[4810]: E1203 06:21:34.379310 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:21:47 crc kubenswrapper[4810]: I1203 06:21:47.377472 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:21:47 crc kubenswrapper[4810]: E1203 06:21:47.378669 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:22:01 crc kubenswrapper[4810]: I1203 06:22:01.378867 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:22:01 crc kubenswrapper[4810]: E1203 06:22:01.380005 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:22:13 crc kubenswrapper[4810]: I1203 06:22:13.378572 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:22:13 crc kubenswrapper[4810]: E1203 06:22:13.380017 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:22:28 crc kubenswrapper[4810]: I1203 06:22:28.378977 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:22:28 crc kubenswrapper[4810]: E1203 06:22:28.380370 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:22:39 crc kubenswrapper[4810]: I1203 06:22:39.378068 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:22:39 crc kubenswrapper[4810]: E1203 06:22:39.379479 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:22:54 crc kubenswrapper[4810]: I1203 06:22:54.377183 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:22:54 crc kubenswrapper[4810]: E1203 06:22:54.378461 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:22:59 crc kubenswrapper[4810]: I1203 06:22:59.106560 4810 generic.go:334] "Generic (PLEG): container finished" podID="d19e7058-371b-4ac9-811a-949bc24e8b03" containerID="b179652359780c58aa417938621ba8b1260b9766410d6c5c11237b4f240c3811" exitCode=0 Dec 03 06:22:59 crc kubenswrapper[4810]: I1203 06:22:59.106703 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" event={"ID":"d19e7058-371b-4ac9-811a-949bc24e8b03","Type":"ContainerDied","Data":"b179652359780c58aa417938621ba8b1260b9766410d6c5c11237b4f240c3811"} Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.694513 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.717695 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-ssh-key\") pod \"d19e7058-371b-4ac9-811a-949bc24e8b03\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.717843 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twl8k\" (UniqueName: \"kubernetes.io/projected/d19e7058-371b-4ac9-811a-949bc24e8b03-kube-api-access-twl8k\") pod \"d19e7058-371b-4ac9-811a-949bc24e8b03\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.718082 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-secret-0\") pod \"d19e7058-371b-4ac9-811a-949bc24e8b03\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.718283 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-combined-ca-bundle\") pod \"d19e7058-371b-4ac9-811a-949bc24e8b03\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.718479 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-inventory\") pod \"d19e7058-371b-4ac9-811a-949bc24e8b03\" (UID: \"d19e7058-371b-4ac9-811a-949bc24e8b03\") " Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.726314 4810 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d19e7058-371b-4ac9-811a-949bc24e8b03" (UID: "d19e7058-371b-4ac9-811a-949bc24e8b03"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.737260 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d19e7058-371b-4ac9-811a-949bc24e8b03-kube-api-access-twl8k" (OuterVolumeSpecName: "kube-api-access-twl8k") pod "d19e7058-371b-4ac9-811a-949bc24e8b03" (UID: "d19e7058-371b-4ac9-811a-949bc24e8b03"). InnerVolumeSpecName "kube-api-access-twl8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.751340 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "d19e7058-371b-4ac9-811a-949bc24e8b03" (UID: "d19e7058-371b-4ac9-811a-949bc24e8b03"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.752148 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-inventory" (OuterVolumeSpecName: "inventory") pod "d19e7058-371b-4ac9-811a-949bc24e8b03" (UID: "d19e7058-371b-4ac9-811a-949bc24e8b03"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.787247 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d19e7058-371b-4ac9-811a-949bc24e8b03" (UID: "d19e7058-371b-4ac9-811a-949bc24e8b03"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.821654 4810 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.821692 4810 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.821709 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.821721 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d19e7058-371b-4ac9-811a-949bc24e8b03-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:23:00 crc kubenswrapper[4810]: I1203 06:23:00.821747 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twl8k\" (UniqueName: \"kubernetes.io/projected/d19e7058-371b-4ac9-811a-949bc24e8b03-kube-api-access-twl8k\") on node \"crc\" DevicePath \"\"" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.128141 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" event={"ID":"d19e7058-371b-4ac9-811a-949bc24e8b03","Type":"ContainerDied","Data":"4a536816e8d881db20794ee5fb143d239350ffebbf510e9e245f5c01346d5473"} Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.128184 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a536816e8d881db20794ee5fb143d239350ffebbf510e9e245f5c01346d5473" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.128219 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.264320 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt"] Dec 03 06:23:01 crc kubenswrapper[4810]: E1203 06:23:01.265722 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerName="extract-content" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.265806 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerName="extract-content" Dec 03 06:23:01 crc kubenswrapper[4810]: E1203 06:23:01.265842 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerName="extract-utilities" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.265857 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerName="extract-utilities" Dec 03 06:23:01 crc kubenswrapper[4810]: E1203 06:23:01.265898 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerName="registry-server" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.265911 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerName="registry-server" Dec 03 06:23:01 crc kubenswrapper[4810]: E1203 06:23:01.265954 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d19e7058-371b-4ac9-811a-949bc24e8b03" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.265967 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d19e7058-371b-4ac9-811a-949bc24e8b03" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.266325 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d19e7058-371b-4ac9-811a-949bc24e8b03" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.266369 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85c19b4-65d4-492a-b0fb-e31c6a6abfb2" containerName="registry-server" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.267498 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.273765 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.273841 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.274380 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.274409 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.274481 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.274677 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.275140 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.299281 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt"] Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433003 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433075 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433103 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433143 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433250 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433306 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433358 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433380 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.433405 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-677ch\" (UniqueName: \"kubernetes.io/projected/52125ad1-c593-45bd-b8d0-9a46aa72f614-kube-api-access-677ch\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.535168 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.536116 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.536153 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.536198 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" 
(UniqueName: \"kubernetes.io/configmap/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.536322 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.536417 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.537219 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.537274 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.537310 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-677ch\" (UniqueName: \"kubernetes.io/projected/52125ad1-c593-45bd-b8d0-9a46aa72f614-kube-api-access-677ch\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.537717 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.546062 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.547098 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: 
\"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.548647 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.549525 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.559428 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.559619 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.560092 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.569445 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-677ch\" (UniqueName: \"kubernetes.io/projected/52125ad1-c593-45bd-b8d0-9a46aa72f614-kube-api-access-677ch\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fmtpt\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:01 crc kubenswrapper[4810]: I1203 06:23:01.593998 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:23:02 crc kubenswrapper[4810]: I1203 06:23:02.009260 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt"] Dec 03 06:23:02 crc kubenswrapper[4810]: I1203 06:23:02.142760 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" event={"ID":"52125ad1-c593-45bd-b8d0-9a46aa72f614","Type":"ContainerStarted","Data":"e83eb7a755167ffd86e91c4e0c07eb97b8220a5ada13119ac34c3501e0d8f9be"} Dec 03 06:23:03 crc kubenswrapper[4810]: I1203 06:23:03.161998 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" event={"ID":"52125ad1-c593-45bd-b8d0-9a46aa72f614","Type":"ContainerStarted","Data":"c59f2404c2313233945b9e0f08aaadd518ef8b98829ee3cb3f07344dc9335181"} Dec 03 06:23:03 crc kubenswrapper[4810]: I1203 06:23:03.195586 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" podStartSLOduration=1.627388634 podStartE2EDuration="2.195556333s" podCreationTimestamp="2025-12-03 06:23:01 +0000 UTC" firstStartedPulling="2025-12-03 06:23:02.019332036 +0000 UTC m=+2505.954792877" lastFinishedPulling="2025-12-03 06:23:02.587499695 +0000 UTC m=+2506.522960576" observedRunningTime="2025-12-03 06:23:03.192260146 +0000 UTC m=+2507.127721007" watchObservedRunningTime="2025-12-03 06:23:03.195556333 +0000 UTC m=+2507.131017174" Dec 03 06:23:08 crc kubenswrapper[4810]: I1203 06:23:08.377529 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:23:08 crc kubenswrapper[4810]: E1203 06:23:08.378914 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:23:19 crc kubenswrapper[4810]: I1203 06:23:19.377595 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:23:19 crc kubenswrapper[4810]: E1203 06:23:19.378838 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:23:29 crc kubenswrapper[4810]: I1203 06:23:29.453680 4810 scope.go:117] "RemoveContainer" containerID="d308c9f2d21d0f053ed8cb4ff142a803a60018624d4df740ef1127ffd38c7039" Dec 03 06:23:29 crc kubenswrapper[4810]: I1203 06:23:29.542325 4810 scope.go:117] "RemoveContainer" containerID="2a10c70294d6d0fdb0dfc5077c85ca4ee61752fea30aad9be58c7a98c358ae53" Dec 03 06:23:29 crc kubenswrapper[4810]: I1203 06:23:29.576295 4810 scope.go:117] "RemoveContainer" containerID="ba9d9f6f82ef4bf6a565fb450da4fb3d70ba015c40d040e406d9379c6a33a0aa" Dec 03 06:23:32 crc kubenswrapper[4810]: I1203 06:23:32.381942 4810 scope.go:117] 
"RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:23:32 crc kubenswrapper[4810]: E1203 06:23:32.382756 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:23:43 crc kubenswrapper[4810]: I1203 06:23:43.378211 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:23:43 crc kubenswrapper[4810]: E1203 06:23:43.379370 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:23:55 crc kubenswrapper[4810]: I1203 06:23:55.378323 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:23:55 crc kubenswrapper[4810]: E1203 06:23:55.379439 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:24:10 crc kubenswrapper[4810]: I1203 06:24:10.377878 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:24:10 crc kubenswrapper[4810]: E1203 06:24:10.378901 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:24:22 crc kubenswrapper[4810]: I1203 06:24:22.377842 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:24:22 crc kubenswrapper[4810]: E1203 06:24:22.378624 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:24:35 crc kubenswrapper[4810]: I1203 06:24:35.378192 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:24:35 crc kubenswrapper[4810]: E1203 06:24:35.381438 4810 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.555861 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zhc9k"] Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.561230 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.570536 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zhc9k"] Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.683276 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-catalog-content\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.683543 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-utilities\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.683876 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lwxs\" (UniqueName: \"kubernetes.io/projected/9982ecae-9af1-4559-b4ad-13b61ff992a5-kube-api-access-9lwxs\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.786640 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lwxs\" (UniqueName: \"kubernetes.io/projected/9982ecae-9af1-4559-b4ad-13b61ff992a5-kube-api-access-9lwxs\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.786876 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-catalog-content\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.786939 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-utilities\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.787649 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-utilities\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.788512 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-catalog-content\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.810296 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lwxs\" (UniqueName: \"kubernetes.io/projected/9982ecae-9af1-4559-b4ad-13b61ff992a5-kube-api-access-9lwxs\") pod \"community-operators-zhc9k\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:38 crc kubenswrapper[4810]: I1203 06:24:38.899481 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:39 crc kubenswrapper[4810]: I1203 06:24:39.504080 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zhc9k"] Dec 03 06:24:40 crc kubenswrapper[4810]: I1203 06:24:40.458865 4810 generic.go:334] "Generic (PLEG): container finished" podID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerID="740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af" exitCode=0 Dec 03 06:24:40 crc kubenswrapper[4810]: I1203 06:24:40.459168 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhc9k" event={"ID":"9982ecae-9af1-4559-b4ad-13b61ff992a5","Type":"ContainerDied","Data":"740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af"} Dec 03 06:24:40 crc kubenswrapper[4810]: I1203 06:24:40.459215 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhc9k" event={"ID":"9982ecae-9af1-4559-b4ad-13b61ff992a5","Type":"ContainerStarted","Data":"e67923915cb7d18174da713f2f0e75702185b6c77c6353230df0584fa17cbdff"} Dec 03 06:24:42 crc kubenswrapper[4810]: I1203 06:24:42.484263 4810 generic.go:334] "Generic (PLEG): container finished" podID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerID="7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4" exitCode=0 Dec 03 06:24:42 crc kubenswrapper[4810]: I1203 06:24:42.484397 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhc9k" event={"ID":"9982ecae-9af1-4559-b4ad-13b61ff992a5","Type":"ContainerDied","Data":"7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4"} Dec 03 06:24:43 crc kubenswrapper[4810]: I1203 06:24:43.500859 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhc9k" event={"ID":"9982ecae-9af1-4559-b4ad-13b61ff992a5","Type":"ContainerStarted","Data":"bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e"} Dec 03 06:24:43 crc kubenswrapper[4810]: I1203 06:24:43.534670 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zhc9k" podStartSLOduration=3.123073339 podStartE2EDuration="5.534643615s" podCreationTimestamp="2025-12-03 06:24:38 +0000 UTC" firstStartedPulling="2025-12-03 06:24:40.461655967 +0000 UTC 
m=+2604.397116808" lastFinishedPulling="2025-12-03 06:24:42.873226213 +0000 UTC m=+2606.808687084" observedRunningTime="2025-12-03 06:24:43.529929411 +0000 UTC m=+2607.465390312" watchObservedRunningTime="2025-12-03 06:24:43.534643615 +0000 UTC m=+2607.470104486" Dec 03 06:24:47 crc kubenswrapper[4810]: I1203 06:24:47.377880 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:24:47 crc kubenswrapper[4810]: E1203 06:24:47.379049 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:24:48 crc kubenswrapper[4810]: I1203 06:24:48.900465 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:48 crc kubenswrapper[4810]: I1203 06:24:48.900536 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:48 crc kubenswrapper[4810]: I1203 06:24:48.989368 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:49 crc kubenswrapper[4810]: I1203 06:24:49.690259 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:49 crc kubenswrapper[4810]: I1203 06:24:49.749495 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zhc9k"] Dec 03 06:24:51 crc kubenswrapper[4810]: I1203 06:24:51.643420 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zhc9k" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerName="registry-server" containerID="cri-o://bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e" gracePeriod=2 Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.101577 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.231863 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lwxs\" (UniqueName: \"kubernetes.io/projected/9982ecae-9af1-4559-b4ad-13b61ff992a5-kube-api-access-9lwxs\") pod \"9982ecae-9af1-4559-b4ad-13b61ff992a5\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.232007 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-catalog-content\") pod \"9982ecae-9af1-4559-b4ad-13b61ff992a5\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.232101 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-utilities\") pod \"9982ecae-9af1-4559-b4ad-13b61ff992a5\" (UID: \"9982ecae-9af1-4559-b4ad-13b61ff992a5\") " Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.232901 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-utilities" (OuterVolumeSpecName: "utilities") pod "9982ecae-9af1-4559-b4ad-13b61ff992a5" (UID: "9982ecae-9af1-4559-b4ad-13b61ff992a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.237825 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9982ecae-9af1-4559-b4ad-13b61ff992a5-kube-api-access-9lwxs" (OuterVolumeSpecName: "kube-api-access-9lwxs") pod "9982ecae-9af1-4559-b4ad-13b61ff992a5" (UID: "9982ecae-9af1-4559-b4ad-13b61ff992a5"). InnerVolumeSpecName "kube-api-access-9lwxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.274593 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9982ecae-9af1-4559-b4ad-13b61ff992a5" (UID: "9982ecae-9af1-4559-b4ad-13b61ff992a5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.334043 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.334084 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lwxs\" (UniqueName: \"kubernetes.io/projected/9982ecae-9af1-4559-b4ad-13b61ff992a5-kube-api-access-9lwxs\") on node \"crc\" DevicePath \"\"" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.334100 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9982ecae-9af1-4559-b4ad-13b61ff992a5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.658260 4810 generic.go:334] "Generic (PLEG): container finished" podID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerID="bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e" exitCode=0 Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.658340 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zhc9k" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.658367 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhc9k" event={"ID":"9982ecae-9af1-4559-b4ad-13b61ff992a5","Type":"ContainerDied","Data":"bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e"} Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.658832 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhc9k" event={"ID":"9982ecae-9af1-4559-b4ad-13b61ff992a5","Type":"ContainerDied","Data":"e67923915cb7d18174da713f2f0e75702185b6c77c6353230df0584fa17cbdff"} Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.658878 4810 scope.go:117] "RemoveContainer" containerID="bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.688539 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zhc9k"] Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.699357 4810 scope.go:117] "RemoveContainer" containerID="7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.700562 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zhc9k"] Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.740029 4810 scope.go:117] "RemoveContainer" containerID="740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.799258 4810 scope.go:117] "RemoveContainer" containerID="bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e" Dec 03 06:24:52 crc kubenswrapper[4810]: E1203 06:24:52.800002 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e\": container with ID starting with bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e not found: ID does not exist" containerID="bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.800034 
4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e"} err="failed to get container status \"bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e\": rpc error: code = NotFound desc = could not find container \"bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e\": container with ID starting with bb7606bba95db728a623bd194c37ced7a59b0e30e5aefcc9368cdfa11605df9e not found: ID does not exist" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.800060 4810 scope.go:117] "RemoveContainer" containerID="7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4" Dec 03 06:24:52 crc kubenswrapper[4810]: E1203 06:24:52.800380 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4\": container with ID starting with 7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4 not found: ID does not exist" containerID="7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.800406 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4"} err="failed to get container status \"7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4\": rpc error: code = NotFound desc = could not find container \"7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4\": container with ID starting with 7ff06f41758f92eff6c6e15b0a614fd7f1b045d19df33804d21fa2bb6a5f01e4 not found: ID does not exist" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.800425 4810 scope.go:117] "RemoveContainer" containerID="740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af" Dec 03 06:24:52 crc kubenswrapper[4810]: E1203 06:24:52.800645 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af\": container with ID starting with 740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af not found: ID does not exist" containerID="740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af" Dec 03 06:24:52 crc kubenswrapper[4810]: I1203 06:24:52.800669 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af"} err="failed to get container status \"740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af\": rpc error: code = NotFound desc = could not find container \"740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af\": container with ID starting with 740e7e86c621fa4a9e7024b4b10002ed78ae9d2d8b42fba24be9161e1da4a4af not found: ID does not exist" Dec 03 06:24:54 crc kubenswrapper[4810]: I1203 06:24:54.422967 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" path="/var/lib/kubelet/pods/9982ecae-9af1-4559-b4ad-13b61ff992a5/volumes" Dec 03 06:25:00 crc kubenswrapper[4810]: I1203 06:25:00.378002 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:25:00 crc kubenswrapper[4810]: I1203 06:25:00.751424 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"aed3eca2d161864fe01853fa25f501df5345a11188ef8ea66db21e2e21153b23"} Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.799691 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sv2m8"] Dec 03 06:25:51 crc kubenswrapper[4810]: E1203 06:25:51.800983 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerName="registry-server" Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.801007 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerName="registry-server" Dec 03 06:25:51 crc kubenswrapper[4810]: E1203 06:25:51.801032 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerName="extract-content" Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.801042 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerName="extract-content" Dec 03 06:25:51 crc kubenswrapper[4810]: E1203 06:25:51.801064 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerName="extract-utilities" Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.801075 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerName="extract-utilities" Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.801430 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="9982ecae-9af1-4559-b4ad-13b61ff992a5" containerName="registry-server" Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.803817 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.817300 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sv2m8"] Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.970157 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-catalog-content\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.970574 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svnzh\" (UniqueName: \"kubernetes.io/projected/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-kube-api-access-svnzh\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:51 crc kubenswrapper[4810]: I1203 06:25:51.970646 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-utilities\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.073283 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-catalog-content\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.073408 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svnzh\" (UniqueName: \"kubernetes.io/projected/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-kube-api-access-svnzh\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.073492 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-utilities\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.074398 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-utilities\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.074837 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-catalog-content\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.108372 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-svnzh\" (UniqueName: \"kubernetes.io/projected/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-kube-api-access-svnzh\") pod \"redhat-operators-sv2m8\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.135613 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.413066 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sv2m8"] Dec 03 06:25:52 crc kubenswrapper[4810]: I1203 06:25:52.456011 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv2m8" event={"ID":"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747","Type":"ContainerStarted","Data":"0d3046a7152bdcfb00c0a7097e9dfbae33c2bfe2df4627f420f338690a905bed"} Dec 03 06:25:53 crc kubenswrapper[4810]: I1203 06:25:53.470822 4810 generic.go:334] "Generic (PLEG): container finished" podID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerID="66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af" exitCode=0 Dec 03 06:25:53 crc kubenswrapper[4810]: I1203 06:25:53.470983 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv2m8" event={"ID":"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747","Type":"ContainerDied","Data":"66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af"} Dec 03 06:25:53 crc kubenswrapper[4810]: I1203 06:25:53.473155 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:25:55 crc kubenswrapper[4810]: I1203 06:25:55.501409 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv2m8" event={"ID":"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747","Type":"ContainerStarted","Data":"3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08"} Dec 03 06:25:56 crc kubenswrapper[4810]: I1203 06:25:56.516545 4810 generic.go:334] "Generic (PLEG): container finished" podID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerID="3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08" exitCode=0 Dec 03 06:25:56 crc kubenswrapper[4810]: I1203 06:25:56.516610 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv2m8" event={"ID":"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747","Type":"ContainerDied","Data":"3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08"} Dec 03 06:25:59 crc kubenswrapper[4810]: I1203 06:25:59.555923 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv2m8" event={"ID":"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747","Type":"ContainerStarted","Data":"e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b"} Dec 03 06:25:59 crc kubenswrapper[4810]: I1203 06:25:59.596533 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sv2m8" podStartSLOduration=3.405020321 podStartE2EDuration="8.596498104s" podCreationTimestamp="2025-12-03 06:25:51 +0000 UTC" firstStartedPulling="2025-12-03 06:25:53.472894137 +0000 UTC m=+2677.408354978" lastFinishedPulling="2025-12-03 06:25:58.66437192 +0000 UTC m=+2682.599832761" observedRunningTime="2025-12-03 06:25:59.58721643 +0000 UTC m=+2683.522677311" watchObservedRunningTime="2025-12-03 06:25:59.596498104 +0000 UTC m=+2683.531958975" Dec 03 06:26:02 crc 
kubenswrapper[4810]: I1203 06:26:02.136522 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:26:02 crc kubenswrapper[4810]: I1203 06:26:02.137430 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:26:03 crc kubenswrapper[4810]: I1203 06:26:03.205422 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sv2m8" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="registry-server" probeResult="failure" output=< Dec 03 06:26:03 crc kubenswrapper[4810]: timeout: failed to connect service ":50051" within 1s Dec 03 06:26:03 crc kubenswrapper[4810]: > Dec 03 06:26:12 crc kubenswrapper[4810]: I1203 06:26:12.188870 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:26:12 crc kubenswrapper[4810]: I1203 06:26:12.246019 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:26:12 crc kubenswrapper[4810]: I1203 06:26:12.437079 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sv2m8"] Dec 03 06:26:13 crc kubenswrapper[4810]: I1203 06:26:13.707177 4810 generic.go:334] "Generic (PLEG): container finished" podID="52125ad1-c593-45bd-b8d0-9a46aa72f614" containerID="c59f2404c2313233945b9e0f08aaadd518ef8b98829ee3cb3f07344dc9335181" exitCode=0 Dec 03 06:26:13 crc kubenswrapper[4810]: I1203 06:26:13.707314 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" event={"ID":"52125ad1-c593-45bd-b8d0-9a46aa72f614","Type":"ContainerDied","Data":"c59f2404c2313233945b9e0f08aaadd518ef8b98829ee3cb3f07344dc9335181"} Dec 03 06:26:13 crc kubenswrapper[4810]: I1203 06:26:13.707801 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sv2m8" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="registry-server" containerID="cri-o://e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b" gracePeriod=2 Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.275531 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.403227 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-utilities\") pod \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.403386 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-catalog-content\") pod \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.403692 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svnzh\" (UniqueName: \"kubernetes.io/projected/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-kube-api-access-svnzh\") pod \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\" (UID: \"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747\") " Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.406558 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-utilities" (OuterVolumeSpecName: "utilities") pod "f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" (UID: "f5ebc501-6e9b-4ee8-92d2-e410ff3f4747"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.413918 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-kube-api-access-svnzh" (OuterVolumeSpecName: "kube-api-access-svnzh") pod "f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" (UID: "f5ebc501-6e9b-4ee8-92d2-e410ff3f4747"). InnerVolumeSpecName "kube-api-access-svnzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.509934 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svnzh\" (UniqueName: \"kubernetes.io/projected/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-kube-api-access-svnzh\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.509974 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.518937 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" (UID: "f5ebc501-6e9b-4ee8-92d2-e410ff3f4747"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.611607 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.722849 4810 generic.go:334] "Generic (PLEG): container finished" podID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerID="e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b" exitCode=0 Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.722928 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sv2m8" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.722970 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv2m8" event={"ID":"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747","Type":"ContainerDied","Data":"e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b"} Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.723002 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sv2m8" event={"ID":"f5ebc501-6e9b-4ee8-92d2-e410ff3f4747","Type":"ContainerDied","Data":"0d3046a7152bdcfb00c0a7097e9dfbae33c2bfe2df4627f420f338690a905bed"} Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.723021 4810 scope.go:117] "RemoveContainer" containerID="e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.773792 4810 scope.go:117] "RemoveContainer" containerID="3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.773929 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sv2m8"] Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.784325 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sv2m8"] Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.820215 4810 scope.go:117] "RemoveContainer" containerID="66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.856069 4810 scope.go:117] "RemoveContainer" containerID="e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b" Dec 03 06:26:14 crc kubenswrapper[4810]: E1203 06:26:14.857611 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b\": container with ID starting with e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b not found: ID does not exist" containerID="e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.857666 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b"} err="failed to get container status \"e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b\": rpc error: code = NotFound desc = could not find container \"e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b\": container with ID starting with e83cc003f33a9c91892eea7c495afa182fc16c971de0c81ef67de4bee79f813b not found: ID does not exist" Dec 03 06:26:14 crc 
kubenswrapper[4810]: I1203 06:26:14.857700 4810 scope.go:117] "RemoveContainer" containerID="3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08" Dec 03 06:26:14 crc kubenswrapper[4810]: E1203 06:26:14.858203 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08\": container with ID starting with 3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08 not found: ID does not exist" containerID="3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.858239 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08"} err="failed to get container status \"3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08\": rpc error: code = NotFound desc = could not find container \"3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08\": container with ID starting with 3b254cec7e493377da4273d8c9b9011c79e53759763381c4366e012b1b4b0c08 not found: ID does not exist" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.858264 4810 scope.go:117] "RemoveContainer" containerID="66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af" Dec 03 06:26:14 crc kubenswrapper[4810]: E1203 06:26:14.858550 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af\": container with ID starting with 66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af not found: ID does not exist" containerID="66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af" Dec 03 06:26:14 crc kubenswrapper[4810]: I1203 06:26:14.858574 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af"} err="failed to get container status \"66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af\": rpc error: code = NotFound desc = could not find container \"66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af\": container with ID starting with 66eba8caf06079f26867755d60aa4abfd44ae2b7aa6d21fa6eeedda4aa5f86af not found: ID does not exist" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.246924 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.426940 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-1\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.427056 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-extra-config-0\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.427168 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-combined-ca-bundle\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.427279 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-0\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.427355 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-677ch\" (UniqueName: \"kubernetes.io/projected/52125ad1-c593-45bd-b8d0-9a46aa72f614-kube-api-access-677ch\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.427446 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-inventory\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.427500 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-ssh-key\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.427536 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-0\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.427599 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-1\") pod \"52125ad1-c593-45bd-b8d0-9a46aa72f614\" (UID: \"52125ad1-c593-45bd-b8d0-9a46aa72f614\") " Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.449671 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.449746 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52125ad1-c593-45bd-b8d0-9a46aa72f614-kube-api-access-677ch" (OuterVolumeSpecName: "kube-api-access-677ch") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "kube-api-access-677ch". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.466775 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.472104 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.475601 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.482442 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.489614 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.496039 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.498974 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-inventory" (OuterVolumeSpecName: "inventory") pod "52125ad1-c593-45bd-b8d0-9a46aa72f614" (UID: "52125ad1-c593-45bd-b8d0-9a46aa72f614"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531136 4810 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531421 4810 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531434 4810 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531446 4810 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531457 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-677ch\" (UniqueName: \"kubernetes.io/projected/52125ad1-c593-45bd-b8d0-9a46aa72f614-kube-api-access-677ch\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531470 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531482 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531493 4810 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.531503 4810 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/52125ad1-c593-45bd-b8d0-9a46aa72f614-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.740595 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" event={"ID":"52125ad1-c593-45bd-b8d0-9a46aa72f614","Type":"ContainerDied","Data":"e83eb7a755167ffd86e91c4e0c07eb97b8220a5ada13119ac34c3501e0d8f9be"} Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.740653 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e83eb7a755167ffd86e91c4e0c07eb97b8220a5ada13119ac34c3501e0d8f9be" Dec 03 06:26:15 crc 
kubenswrapper[4810]: I1203 06:26:15.740784 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fmtpt" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.888456 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf"] Dec 03 06:26:15 crc kubenswrapper[4810]: E1203 06:26:15.889099 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52125ad1-c593-45bd-b8d0-9a46aa72f614" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.889129 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="52125ad1-c593-45bd-b8d0-9a46aa72f614" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 06:26:15 crc kubenswrapper[4810]: E1203 06:26:15.889147 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="extract-utilities" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.889161 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="extract-utilities" Dec 03 06:26:15 crc kubenswrapper[4810]: E1203 06:26:15.889178 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="registry-server" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.889193 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="registry-server" Dec 03 06:26:15 crc kubenswrapper[4810]: E1203 06:26:15.889251 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="extract-content" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.889264 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="extract-content" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.889566 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" containerName="registry-server" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.889621 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="52125ad1-c593-45bd-b8d0-9a46aa72f614" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.890653 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.898680 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.898939 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.899138 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.899273 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-txplk" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.901019 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:26:15 crc kubenswrapper[4810]: I1203 06:26:15.916026 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf"] Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.041496 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.042282 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.042667 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.042928 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.043139 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv6wh\" (UniqueName: \"kubernetes.io/projected/91e736a7-e1a5-4b7c-9638-71c18367e234-kube-api-access-jv6wh\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc 
kubenswrapper[4810]: I1203 06:26:16.043438 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.043600 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.145638 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.145698 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.145816 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.145854 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.145987 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.146029 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.146072 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv6wh\" (UniqueName: \"kubernetes.io/projected/91e736a7-e1a5-4b7c-9638-71c18367e234-kube-api-access-jv6wh\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.152142 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.152160 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.152717 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.153149 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.154592 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.154838 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.170826 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv6wh\" (UniqueName: \"kubernetes.io/projected/91e736a7-e1a5-4b7c-9638-71c18367e234-kube-api-access-jv6wh\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-9zthf\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.219963 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.393618 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5ebc501-6e9b-4ee8-92d2-e410ff3f4747" path="/var/lib/kubelet/pods/f5ebc501-6e9b-4ee8-92d2-e410ff3f4747/volumes" Dec 03 06:26:16 crc kubenswrapper[4810]: I1203 06:26:16.819883 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf"] Dec 03 06:26:17 crc kubenswrapper[4810]: I1203 06:26:17.629940 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 06:26:17 crc kubenswrapper[4810]: I1203 06:26:17.759615 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" event={"ID":"91e736a7-e1a5-4b7c-9638-71c18367e234","Type":"ContainerStarted","Data":"9dbb62753c086a280a277c79a6968cb21e34e79fff80ef8a1643fb2f62d22ead"} Dec 03 06:26:18 crc kubenswrapper[4810]: I1203 06:26:18.772697 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" event={"ID":"91e736a7-e1a5-4b7c-9638-71c18367e234","Type":"ContainerStarted","Data":"1a3b1df6a5916dedc4176f749a76ed195344b27657cca73d1d779ff19d9fcf91"} Dec 03 06:26:18 crc kubenswrapper[4810]: I1203 06:26:18.801781 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" podStartSLOduration=3.007449281 podStartE2EDuration="3.801760678s" podCreationTimestamp="2025-12-03 06:26:15 +0000 UTC" firstStartedPulling="2025-12-03 06:26:16.831995236 +0000 UTC m=+2700.767456097" lastFinishedPulling="2025-12-03 06:26:17.626306623 +0000 UTC m=+2701.561767494" observedRunningTime="2025-12-03 06:26:18.800410002 +0000 UTC m=+2702.735870883" watchObservedRunningTime="2025-12-03 06:26:18.801760678 +0000 UTC m=+2702.737221529" Dec 03 06:27:25 crc kubenswrapper[4810]: I1203 06:27:25.677007 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:27:25 crc kubenswrapper[4810]: I1203 06:27:25.678027 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:27:55 crc kubenswrapper[4810]: I1203 06:27:55.677313 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:27:55 crc kubenswrapper[4810]: I1203 06:27:55.677934 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:28:25 crc kubenswrapper[4810]: I1203 06:28:25.676831 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:28:25 crc kubenswrapper[4810]: I1203 06:28:25.677587 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:28:25 crc kubenswrapper[4810]: I1203 06:28:25.677654 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:28:25 crc kubenswrapper[4810]: I1203 06:28:25.678776 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aed3eca2d161864fe01853fa25f501df5345a11188ef8ea66db21e2e21153b23"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:28:25 crc kubenswrapper[4810]: I1203 06:28:25.678875 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://aed3eca2d161864fe01853fa25f501df5345a11188ef8ea66db21e2e21153b23" gracePeriod=600 Dec 03 06:28:26 crc kubenswrapper[4810]: I1203 06:28:26.292776 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="aed3eca2d161864fe01853fa25f501df5345a11188ef8ea66db21e2e21153b23" exitCode=0 Dec 03 06:28:26 crc kubenswrapper[4810]: I1203 06:28:26.293122 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"aed3eca2d161864fe01853fa25f501df5345a11188ef8ea66db21e2e21153b23"} Dec 03 06:28:26 crc kubenswrapper[4810]: I1203 06:28:26.293149 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d"} Dec 03 06:28:26 crc kubenswrapper[4810]: I1203 06:28:26.293166 4810 scope.go:117] "RemoveContainer" containerID="6985821d0fb0c3178418bf6c140ce924ded76bd5b79d0a489985db20f06931eb" Dec 03 06:28:27 crc kubenswrapper[4810]: I1203 06:28:27.869482 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8xbvf"] Dec 03 06:28:27 crc kubenswrapper[4810]: I1203 06:28:27.873833 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:27 crc kubenswrapper[4810]: I1203 06:28:27.895012 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8xbvf"] Dec 03 06:28:27 crc kubenswrapper[4810]: I1203 06:28:27.987887 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t68n\" (UniqueName: \"kubernetes.io/projected/95178480-806b-44e5-9211-abc51429aca5-kube-api-access-7t68n\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:27 crc kubenswrapper[4810]: I1203 06:28:27.988399 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-utilities\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:27 crc kubenswrapper[4810]: I1203 06:28:27.988813 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-catalog-content\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:28 crc kubenswrapper[4810]: I1203 06:28:28.091027 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-catalog-content\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:28 crc kubenswrapper[4810]: I1203 06:28:28.091164 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t68n\" (UniqueName: \"kubernetes.io/projected/95178480-806b-44e5-9211-abc51429aca5-kube-api-access-7t68n\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:28 crc kubenswrapper[4810]: I1203 06:28:28.091255 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-utilities\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:28 crc kubenswrapper[4810]: I1203 06:28:28.091853 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-utilities\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:28 crc kubenswrapper[4810]: I1203 06:28:28.092138 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-catalog-content\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:28 crc kubenswrapper[4810]: I1203 06:28:28.122886 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7t68n\" (UniqueName: \"kubernetes.io/projected/95178480-806b-44e5-9211-abc51429aca5-kube-api-access-7t68n\") pod \"certified-operators-8xbvf\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:28 crc kubenswrapper[4810]: I1203 06:28:28.209897 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:28 crc kubenswrapper[4810]: I1203 06:28:28.719435 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8xbvf"] Dec 03 06:28:29 crc kubenswrapper[4810]: I1203 06:28:29.373560 4810 generic.go:334] "Generic (PLEG): container finished" podID="95178480-806b-44e5-9211-abc51429aca5" containerID="e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f" exitCode=0 Dec 03 06:28:29 crc kubenswrapper[4810]: I1203 06:28:29.373636 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xbvf" event={"ID":"95178480-806b-44e5-9211-abc51429aca5","Type":"ContainerDied","Data":"e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f"} Dec 03 06:28:29 crc kubenswrapper[4810]: I1203 06:28:29.373690 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xbvf" event={"ID":"95178480-806b-44e5-9211-abc51429aca5","Type":"ContainerStarted","Data":"4f4ba7853b7d19910ff24433651c76e24c55d2017b64fb088e898beed66aa935"} Dec 03 06:28:31 crc kubenswrapper[4810]: I1203 06:28:31.405495 4810 generic.go:334] "Generic (PLEG): container finished" podID="95178480-806b-44e5-9211-abc51429aca5" containerID="a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2" exitCode=0 Dec 03 06:28:31 crc kubenswrapper[4810]: I1203 06:28:31.405622 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xbvf" event={"ID":"95178480-806b-44e5-9211-abc51429aca5","Type":"ContainerDied","Data":"a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2"} Dec 03 06:28:32 crc kubenswrapper[4810]: I1203 06:28:32.421406 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xbvf" event={"ID":"95178480-806b-44e5-9211-abc51429aca5","Type":"ContainerStarted","Data":"7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad"} Dec 03 06:28:32 crc kubenswrapper[4810]: I1203 06:28:32.456813 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8xbvf" podStartSLOduration=2.93241615 podStartE2EDuration="5.456777324s" podCreationTimestamp="2025-12-03 06:28:27 +0000 UTC" firstStartedPulling="2025-12-03 06:28:29.379085412 +0000 UTC m=+2833.314546263" lastFinishedPulling="2025-12-03 06:28:31.903446576 +0000 UTC m=+2835.838907437" observedRunningTime="2025-12-03 06:28:32.447718815 +0000 UTC m=+2836.383179666" watchObservedRunningTime="2025-12-03 06:28:32.456777324 +0000 UTC m=+2836.392238225" Dec 03 06:28:38 crc kubenswrapper[4810]: I1203 06:28:38.210129 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:38 crc kubenswrapper[4810]: I1203 06:28:38.210870 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:38 crc kubenswrapper[4810]: I1203 06:28:38.288330 4810 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:38 crc kubenswrapper[4810]: I1203 06:28:38.554340 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:38 crc kubenswrapper[4810]: I1203 06:28:38.629060 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8xbvf"] Dec 03 06:28:40 crc kubenswrapper[4810]: I1203 06:28:40.503797 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8xbvf" podUID="95178480-806b-44e5-9211-abc51429aca5" containerName="registry-server" containerID="cri-o://7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad" gracePeriod=2 Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.038391 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.097460 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-utilities\") pod \"95178480-806b-44e5-9211-abc51429aca5\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.097629 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-catalog-content\") pod \"95178480-806b-44e5-9211-abc51429aca5\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.097664 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t68n\" (UniqueName: \"kubernetes.io/projected/95178480-806b-44e5-9211-abc51429aca5-kube-api-access-7t68n\") pod \"95178480-806b-44e5-9211-abc51429aca5\" (UID: \"95178480-806b-44e5-9211-abc51429aca5\") " Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.098448 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-utilities" (OuterVolumeSpecName: "utilities") pod "95178480-806b-44e5-9211-abc51429aca5" (UID: "95178480-806b-44e5-9211-abc51429aca5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.101282 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.104001 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95178480-806b-44e5-9211-abc51429aca5-kube-api-access-7t68n" (OuterVolumeSpecName: "kube-api-access-7t68n") pod "95178480-806b-44e5-9211-abc51429aca5" (UID: "95178480-806b-44e5-9211-abc51429aca5"). InnerVolumeSpecName "kube-api-access-7t68n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.203578 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t68n\" (UniqueName: \"kubernetes.io/projected/95178480-806b-44e5-9211-abc51429aca5-kube-api-access-7t68n\") on node \"crc\" DevicePath \"\"" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.363878 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "95178480-806b-44e5-9211-abc51429aca5" (UID: "95178480-806b-44e5-9211-abc51429aca5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.408093 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95178480-806b-44e5-9211-abc51429aca5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.519220 4810 generic.go:334] "Generic (PLEG): container finished" podID="95178480-806b-44e5-9211-abc51429aca5" containerID="7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad" exitCode=0 Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.519277 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xbvf" event={"ID":"95178480-806b-44e5-9211-abc51429aca5","Type":"ContainerDied","Data":"7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad"} Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.519308 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xbvf" event={"ID":"95178480-806b-44e5-9211-abc51429aca5","Type":"ContainerDied","Data":"4f4ba7853b7d19910ff24433651c76e24c55d2017b64fb088e898beed66aa935"} Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.519328 4810 scope.go:117] "RemoveContainer" containerID="7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.519506 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8xbvf" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.585641 4810 scope.go:117] "RemoveContainer" containerID="a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.586949 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8xbvf"] Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.619312 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8xbvf"] Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.638361 4810 scope.go:117] "RemoveContainer" containerID="e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.670469 4810 scope.go:117] "RemoveContainer" containerID="7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad" Dec 03 06:28:41 crc kubenswrapper[4810]: E1203 06:28:41.671145 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad\": container with ID starting with 7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad not found: ID does not exist" containerID="7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.671406 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad"} err="failed to get container status \"7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad\": rpc error: code = NotFound desc = could not find container \"7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad\": container with ID starting with 7400270aa617cd4204cf9aa43f590a9ab57555d82b1c2451e7c81c452986f9ad not found: ID does not exist" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.671434 4810 scope.go:117] "RemoveContainer" containerID="a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2" Dec 03 06:28:41 crc kubenswrapper[4810]: E1203 06:28:41.671935 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2\": container with ID starting with a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2 not found: ID does not exist" containerID="a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.671968 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2"} err="failed to get container status \"a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2\": rpc error: code = NotFound desc = could not find container \"a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2\": container with ID starting with a496eb29f95584b9eed860f3eead03271fc29338e88316a1bc546ab1b7f038a2 not found: ID does not exist" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.671990 4810 scope.go:117] "RemoveContainer" containerID="e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f" Dec 03 06:28:41 crc kubenswrapper[4810]: E1203 06:28:41.672303 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f\": container with ID starting with e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f not found: ID does not exist" containerID="e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f" Dec 03 06:28:41 crc kubenswrapper[4810]: I1203 06:28:41.672323 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f"} err="failed to get container status \"e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f\": rpc error: code = NotFound desc = could not find container \"e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f\": container with ID starting with e45e58a33882d81927e82d3835d5d4e2b148140e0c0d1f16ed91e3760090be9f not found: ID does not exist" Dec 03 06:28:42 crc kubenswrapper[4810]: I1203 06:28:42.396488 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95178480-806b-44e5-9211-abc51429aca5" path="/var/lib/kubelet/pods/95178480-806b-44e5-9211-abc51429aca5/volumes" Dec 03 06:29:00 crc kubenswrapper[4810]: I1203 06:29:00.720722 4810 generic.go:334] "Generic (PLEG): container finished" podID="91e736a7-e1a5-4b7c-9638-71c18367e234" containerID="1a3b1df6a5916dedc4176f749a76ed195344b27657cca73d1d779ff19d9fcf91" exitCode=0 Dec 03 06:29:00 crc kubenswrapper[4810]: I1203 06:29:00.720884 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" event={"ID":"91e736a7-e1a5-4b7c-9638-71c18367e234","Type":"ContainerDied","Data":"1a3b1df6a5916dedc4176f749a76ed195344b27657cca73d1d779ff19d9fcf91"} Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.235632 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.285475 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-1\") pod \"91e736a7-e1a5-4b7c-9638-71c18367e234\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.285555 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ssh-key\") pod \"91e736a7-e1a5-4b7c-9638-71c18367e234\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.285625 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-inventory\") pod \"91e736a7-e1a5-4b7c-9638-71c18367e234\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.285675 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-2\") pod \"91e736a7-e1a5-4b7c-9638-71c18367e234\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.285708 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-0\") pod \"91e736a7-e1a5-4b7c-9638-71c18367e234\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.285765 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jv6wh\" (UniqueName: \"kubernetes.io/projected/91e736a7-e1a5-4b7c-9638-71c18367e234-kube-api-access-jv6wh\") pod \"91e736a7-e1a5-4b7c-9638-71c18367e234\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.285902 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-telemetry-combined-ca-bundle\") pod \"91e736a7-e1a5-4b7c-9638-71c18367e234\" (UID: \"91e736a7-e1a5-4b7c-9638-71c18367e234\") " Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.293886 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "91e736a7-e1a5-4b7c-9638-71c18367e234" (UID: "91e736a7-e1a5-4b7c-9638-71c18367e234"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.299374 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e736a7-e1a5-4b7c-9638-71c18367e234-kube-api-access-jv6wh" (OuterVolumeSpecName: "kube-api-access-jv6wh") pod "91e736a7-e1a5-4b7c-9638-71c18367e234" (UID: "91e736a7-e1a5-4b7c-9638-71c18367e234"). 
InnerVolumeSpecName "kube-api-access-jv6wh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.314308 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "91e736a7-e1a5-4b7c-9638-71c18367e234" (UID: "91e736a7-e1a5-4b7c-9638-71c18367e234"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.318552 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-inventory" (OuterVolumeSpecName: "inventory") pod "91e736a7-e1a5-4b7c-9638-71c18367e234" (UID: "91e736a7-e1a5-4b7c-9638-71c18367e234"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.318633 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "91e736a7-e1a5-4b7c-9638-71c18367e234" (UID: "91e736a7-e1a5-4b7c-9638-71c18367e234"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.319525 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "91e736a7-e1a5-4b7c-9638-71c18367e234" (UID: "91e736a7-e1a5-4b7c-9638-71c18367e234"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.334748 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "91e736a7-e1a5-4b7c-9638-71c18367e234" (UID: "91e736a7-e1a5-4b7c-9638-71c18367e234"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.388589 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jv6wh\" (UniqueName: \"kubernetes.io/projected/91e736a7-e1a5-4b7c-9638-71c18367e234-kube-api-access-jv6wh\") on node \"crc\" DevicePath \"\"" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.388719 4810 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.389262 4810 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.389284 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.389298 4810 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.389307 4810 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.389317 4810 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/91e736a7-e1a5-4b7c-9638-71c18367e234-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.749555 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" event={"ID":"91e736a7-e1a5-4b7c-9638-71c18367e234","Type":"ContainerDied","Data":"9dbb62753c086a280a277c79a6968cb21e34e79fff80ef8a1643fb2f62d22ead"} Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.749616 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dbb62753c086a280a277c79a6968cb21e34e79fff80ef8a1643fb2f62d22ead" Dec 03 06:29:02 crc kubenswrapper[4810]: I1203 06:29:02.749880 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-9zthf" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.600461 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 06:29:47 crc kubenswrapper[4810]: E1203 06:29:47.601218 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95178480-806b-44e5-9211-abc51429aca5" containerName="extract-content" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.601231 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95178480-806b-44e5-9211-abc51429aca5" containerName="extract-content" Dec 03 06:29:47 crc kubenswrapper[4810]: E1203 06:29:47.601251 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e736a7-e1a5-4b7c-9638-71c18367e234" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.601259 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e736a7-e1a5-4b7c-9638-71c18367e234" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 06:29:47 crc kubenswrapper[4810]: E1203 06:29:47.601274 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95178480-806b-44e5-9211-abc51429aca5" containerName="extract-utilities" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.601279 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95178480-806b-44e5-9211-abc51429aca5" containerName="extract-utilities" Dec 03 06:29:47 crc kubenswrapper[4810]: E1203 06:29:47.601303 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95178480-806b-44e5-9211-abc51429aca5" containerName="registry-server" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.601309 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95178480-806b-44e5-9211-abc51429aca5" containerName="registry-server" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.601472 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e736a7-e1a5-4b7c-9638-71c18367e234" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.601499 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="95178480-806b-44e5-9211-abc51429aca5" containerName="registry-server" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.602916 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.608406 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.608467 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.608702 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.609486 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bx766" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.632296 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.719964 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.720043 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.720095 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-config-data\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.720123 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.720167 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq5f8\" (UniqueName: \"kubernetes.io/projected/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-kube-api-access-wq5f8\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.720203 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.720252 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: 
\"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.720300 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.720324 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823081 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823213 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823332 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823448 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823540 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-config-data\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823589 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823662 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq5f8\" (UniqueName: \"kubernetes.io/projected/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-kube-api-access-wq5f8\") pod \"tempest-tests-tempest\" (UID: 
\"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823788 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.823874 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.824704 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.825259 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.825915 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.826489 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-config-data\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.829089 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.831267 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.832259 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " 
pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.845159 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.852455 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq5f8\" (UniqueName: \"kubernetes.io/projected/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-kube-api-access-wq5f8\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.857027 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " pod="openstack/tempest-tests-tempest" Dec 03 06:29:47 crc kubenswrapper[4810]: I1203 06:29:47.926183 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 06:29:48 crc kubenswrapper[4810]: I1203 06:29:48.523398 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 06:29:49 crc kubenswrapper[4810]: I1203 06:29:49.325879 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f","Type":"ContainerStarted","Data":"43b85d63fb03e308c2f35aadec0d3c32c74b86050345268b89dac1a792316ee0"} Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.150025 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc"] Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.152691 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.155612 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.155673 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.160331 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc"] Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.312812 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8w6hw\" (UniqueName: \"kubernetes.io/projected/aaf72b28-0f75-419d-92d3-a1df8823d7a4-kube-api-access-8w6hw\") pod \"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.312897 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aaf72b28-0f75-419d-92d3-a1df8823d7a4-config-volume\") pod \"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.313180 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aaf72b28-0f75-419d-92d3-a1df8823d7a4-secret-volume\") pod \"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.415267 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aaf72b28-0f75-419d-92d3-a1df8823d7a4-secret-volume\") pod \"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.415364 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8w6hw\" (UniqueName: \"kubernetes.io/projected/aaf72b28-0f75-419d-92d3-a1df8823d7a4-kube-api-access-8w6hw\") pod \"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.415389 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aaf72b28-0f75-419d-92d3-a1df8823d7a4-config-volume\") pod \"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.416379 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aaf72b28-0f75-419d-92d3-a1df8823d7a4-config-volume\") pod 
\"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.424894 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aaf72b28-0f75-419d-92d3-a1df8823d7a4-secret-volume\") pod \"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.431609 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8w6hw\" (UniqueName: \"kubernetes.io/projected/aaf72b28-0f75-419d-92d3-a1df8823d7a4-kube-api-access-8w6hw\") pod \"collect-profiles-29412390-b5blc\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:00 crc kubenswrapper[4810]: I1203 06:30:00.480501 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:04 crc kubenswrapper[4810]: I1203 06:30:04.156151 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc"] Dec 03 06:30:04 crc kubenswrapper[4810]: I1203 06:30:04.526851 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" event={"ID":"aaf72b28-0f75-419d-92d3-a1df8823d7a4","Type":"ContainerStarted","Data":"8ec7cfb31177356e3e56e3cbc5045d5412310d5d28437f9df2686d0be82f97cd"} Dec 03 06:30:04 crc kubenswrapper[4810]: I1203 06:30:04.526918 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" event={"ID":"aaf72b28-0f75-419d-92d3-a1df8823d7a4","Type":"ContainerStarted","Data":"e93522d4242721094568a10529ee89ed5a10c0df9476088b8c97f99dec6ff31c"} Dec 03 06:30:04 crc kubenswrapper[4810]: I1203 06:30:04.569983 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" podStartSLOduration=4.569953429 podStartE2EDuration="4.569953429s" podCreationTimestamp="2025-12-03 06:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:30:04.555087807 +0000 UTC m=+2928.490548648" watchObservedRunningTime="2025-12-03 06:30:04.569953429 +0000 UTC m=+2928.505414270" Dec 03 06:30:05 crc kubenswrapper[4810]: I1203 06:30:05.540164 4810 generic.go:334] "Generic (PLEG): container finished" podID="aaf72b28-0f75-419d-92d3-a1df8823d7a4" containerID="8ec7cfb31177356e3e56e3cbc5045d5412310d5d28437f9df2686d0be82f97cd" exitCode=0 Dec 03 06:30:05 crc kubenswrapper[4810]: I1203 06:30:05.540215 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" event={"ID":"aaf72b28-0f75-419d-92d3-a1df8823d7a4","Type":"ContainerDied","Data":"8ec7cfb31177356e3e56e3cbc5045d5412310d5d28437f9df2686d0be82f97cd"} Dec 03 06:30:05 crc kubenswrapper[4810]: I1203 06:30:05.543313 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f","Type":"ContainerStarted","Data":"7a56db0da43cbeb3fd9eeedf3ef1e2fc88d886df42055893b618c74ee7114221"} Dec 03 06:30:05 crc kubenswrapper[4810]: I1203 06:30:05.601300 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.340856152 podStartE2EDuration="19.601270808s" podCreationTimestamp="2025-12-03 06:29:46 +0000 UTC" firstStartedPulling="2025-12-03 06:29:48.529692753 +0000 UTC m=+2912.465153644" lastFinishedPulling="2025-12-03 06:30:03.790107449 +0000 UTC m=+2927.725568300" observedRunningTime="2025-12-03 06:30:05.589073876 +0000 UTC m=+2929.524534727" watchObservedRunningTime="2025-12-03 06:30:05.601270808 +0000 UTC m=+2929.536731679" Dec 03 06:30:06 crc kubenswrapper[4810]: I1203 06:30:06.928174 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.060909 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8w6hw\" (UniqueName: \"kubernetes.io/projected/aaf72b28-0f75-419d-92d3-a1df8823d7a4-kube-api-access-8w6hw\") pod \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.061006 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aaf72b28-0f75-419d-92d3-a1df8823d7a4-config-volume\") pod \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.061050 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aaf72b28-0f75-419d-92d3-a1df8823d7a4-secret-volume\") pod \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\" (UID: \"aaf72b28-0f75-419d-92d3-a1df8823d7a4\") " Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.063190 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf72b28-0f75-419d-92d3-a1df8823d7a4-config-volume" (OuterVolumeSpecName: "config-volume") pod "aaf72b28-0f75-419d-92d3-a1df8823d7a4" (UID: "aaf72b28-0f75-419d-92d3-a1df8823d7a4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.072758 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaf72b28-0f75-419d-92d3-a1df8823d7a4-kube-api-access-8w6hw" (OuterVolumeSpecName: "kube-api-access-8w6hw") pod "aaf72b28-0f75-419d-92d3-a1df8823d7a4" (UID: "aaf72b28-0f75-419d-92d3-a1df8823d7a4"). InnerVolumeSpecName "kube-api-access-8w6hw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.073927 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaf72b28-0f75-419d-92d3-a1df8823d7a4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "aaf72b28-0f75-419d-92d3-a1df8823d7a4" (UID: "aaf72b28-0f75-419d-92d3-a1df8823d7a4"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.163392 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8w6hw\" (UniqueName: \"kubernetes.io/projected/aaf72b28-0f75-419d-92d3-a1df8823d7a4-kube-api-access-8w6hw\") on node \"crc\" DevicePath \"\"" Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.163430 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aaf72b28-0f75-419d-92d3-a1df8823d7a4-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.163443 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aaf72b28-0f75-419d-92d3-a1df8823d7a4-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.270589 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f"] Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.279588 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412345-6sv8f"] Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.565215 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" event={"ID":"aaf72b28-0f75-419d-92d3-a1df8823d7a4","Type":"ContainerDied","Data":"e93522d4242721094568a10529ee89ed5a10c0df9476088b8c97f99dec6ff31c"} Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.565266 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e93522d4242721094568a10529ee89ed5a10c0df9476088b8c97f99dec6ff31c" Dec 03 06:30:07 crc kubenswrapper[4810]: I1203 06:30:07.565311 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412390-b5blc" Dec 03 06:30:08 crc kubenswrapper[4810]: I1203 06:30:08.391272 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a1d62d9-43bd-40c8-8f37-466909c07065" path="/var/lib/kubelet/pods/4a1d62d9-43bd-40c8-8f37-466909c07065/volumes" Dec 03 06:30:29 crc kubenswrapper[4810]: I1203 06:30:29.804376 4810 scope.go:117] "RemoveContainer" containerID="794c65ba7f08ef9ab7d972f445647e1fc09bde9b6cd89e490e05545a625efc03" Dec 03 06:30:55 crc kubenswrapper[4810]: I1203 06:30:55.677691 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:30:55 crc kubenswrapper[4810]: I1203 06:30:55.678577 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:31:25 crc kubenswrapper[4810]: I1203 06:31:25.677893 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:31:25 crc kubenswrapper[4810]: I1203 06:31:25.679910 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:31:55 crc kubenswrapper[4810]: I1203 06:31:55.677282 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:31:55 crc kubenswrapper[4810]: I1203 06:31:55.678465 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:31:55 crc kubenswrapper[4810]: I1203 06:31:55.678550 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:31:55 crc kubenswrapper[4810]: I1203 06:31:55.680299 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:31:55 crc kubenswrapper[4810]: I1203 06:31:55.680388 4810 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" gracePeriod=600 Dec 03 06:31:55 crc kubenswrapper[4810]: E1203 06:31:55.808821 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:31:56 crc kubenswrapper[4810]: I1203 06:31:56.791480 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" exitCode=0 Dec 03 06:31:56 crc kubenswrapper[4810]: I1203 06:31:56.791602 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d"} Dec 03 06:31:56 crc kubenswrapper[4810]: I1203 06:31:56.791979 4810 scope.go:117] "RemoveContainer" containerID="aed3eca2d161864fe01853fa25f501df5345a11188ef8ea66db21e2e21153b23" Dec 03 06:31:56 crc kubenswrapper[4810]: I1203 06:31:56.792350 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:31:56 crc kubenswrapper[4810]: E1203 06:31:56.792585 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.180021 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7qb95"] Dec 03 06:31:58 crc kubenswrapper[4810]: E1203 06:31:58.182836 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf72b28-0f75-419d-92d3-a1df8823d7a4" containerName="collect-profiles" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.182950 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf72b28-0f75-419d-92d3-a1df8823d7a4" containerName="collect-profiles" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.183275 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaf72b28-0f75-419d-92d3-a1df8823d7a4" containerName="collect-profiles" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.184997 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.193401 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qb95"] Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.316705 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-utilities\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.317582 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-catalog-content\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.318040 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qf9t\" (UniqueName: \"kubernetes.io/projected/1177d86d-1536-4bc2-9904-ad7565d88cff-kube-api-access-6qf9t\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.420227 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-catalog-content\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.420341 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qf9t\" (UniqueName: \"kubernetes.io/projected/1177d86d-1536-4bc2-9904-ad7565d88cff-kube-api-access-6qf9t\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.420557 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-utilities\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.420871 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-catalog-content\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.421099 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-utilities\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.446825 4810 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6qf9t\" (UniqueName: \"kubernetes.io/projected/1177d86d-1536-4bc2-9904-ad7565d88cff-kube-api-access-6qf9t\") pod \"redhat-marketplace-7qb95\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:58 crc kubenswrapper[4810]: I1203 06:31:58.517848 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:31:59 crc kubenswrapper[4810]: I1203 06:31:59.031612 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qb95"] Dec 03 06:31:59 crc kubenswrapper[4810]: W1203 06:31:59.034814 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1177d86d_1536_4bc2_9904_ad7565d88cff.slice/crio-9b4e085efea5b3026f12fcd1edee890e77e9a13584f845a3053bc2c8a7af592b WatchSource:0}: Error finding container 9b4e085efea5b3026f12fcd1edee890e77e9a13584f845a3053bc2c8a7af592b: Status 404 returned error can't find the container with id 9b4e085efea5b3026f12fcd1edee890e77e9a13584f845a3053bc2c8a7af592b Dec 03 06:31:59 crc kubenswrapper[4810]: I1203 06:31:59.850035 4810 generic.go:334] "Generic (PLEG): container finished" podID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerID="10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f" exitCode=0 Dec 03 06:31:59 crc kubenswrapper[4810]: I1203 06:31:59.850431 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qb95" event={"ID":"1177d86d-1536-4bc2-9904-ad7565d88cff","Type":"ContainerDied","Data":"10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f"} Dec 03 06:31:59 crc kubenswrapper[4810]: I1203 06:31:59.850462 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qb95" event={"ID":"1177d86d-1536-4bc2-9904-ad7565d88cff","Type":"ContainerStarted","Data":"9b4e085efea5b3026f12fcd1edee890e77e9a13584f845a3053bc2c8a7af592b"} Dec 03 06:31:59 crc kubenswrapper[4810]: I1203 06:31:59.853476 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:32:00 crc kubenswrapper[4810]: I1203 06:32:00.867720 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qb95" event={"ID":"1177d86d-1536-4bc2-9904-ad7565d88cff","Type":"ContainerStarted","Data":"eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa"} Dec 03 06:32:01 crc kubenswrapper[4810]: I1203 06:32:01.888990 4810 generic.go:334] "Generic (PLEG): container finished" podID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerID="eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa" exitCode=0 Dec 03 06:32:01 crc kubenswrapper[4810]: I1203 06:32:01.889635 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qb95" event={"ID":"1177d86d-1536-4bc2-9904-ad7565d88cff","Type":"ContainerDied","Data":"eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa"} Dec 03 06:32:02 crc kubenswrapper[4810]: I1203 06:32:02.904207 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qb95" event={"ID":"1177d86d-1536-4bc2-9904-ad7565d88cff","Type":"ContainerStarted","Data":"6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94"} Dec 03 06:32:02 crc kubenswrapper[4810]: I1203 06:32:02.937786 4810 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7qb95" podStartSLOduration=2.397665285 podStartE2EDuration="4.937766151s" podCreationTimestamp="2025-12-03 06:31:58 +0000 UTC" firstStartedPulling="2025-12-03 06:31:59.853115099 +0000 UTC m=+3043.788575960" lastFinishedPulling="2025-12-03 06:32:02.393215975 +0000 UTC m=+3046.328676826" observedRunningTime="2025-12-03 06:32:02.92824172 +0000 UTC m=+3046.863702561" watchObservedRunningTime="2025-12-03 06:32:02.937766151 +0000 UTC m=+3046.873227002" Dec 03 06:32:08 crc kubenswrapper[4810]: I1203 06:32:08.518262 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:32:08 crc kubenswrapper[4810]: I1203 06:32:08.519061 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:32:08 crc kubenswrapper[4810]: I1203 06:32:08.584831 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:32:09 crc kubenswrapper[4810]: I1203 06:32:09.046864 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:32:09 crc kubenswrapper[4810]: I1203 06:32:09.116379 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qb95"] Dec 03 06:32:09 crc kubenswrapper[4810]: I1203 06:32:09.378503 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:32:09 crc kubenswrapper[4810]: E1203 06:32:09.378864 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:32:10 crc kubenswrapper[4810]: I1203 06:32:10.984057 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7qb95" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerName="registry-server" containerID="cri-o://6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94" gracePeriod=2 Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.514209 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.595669 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-catalog-content\") pod \"1177d86d-1536-4bc2-9904-ad7565d88cff\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.596166 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-utilities\") pod \"1177d86d-1536-4bc2-9904-ad7565d88cff\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.596206 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qf9t\" (UniqueName: \"kubernetes.io/projected/1177d86d-1536-4bc2-9904-ad7565d88cff-kube-api-access-6qf9t\") pod \"1177d86d-1536-4bc2-9904-ad7565d88cff\" (UID: \"1177d86d-1536-4bc2-9904-ad7565d88cff\") " Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.597775 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-utilities" (OuterVolumeSpecName: "utilities") pod "1177d86d-1536-4bc2-9904-ad7565d88cff" (UID: "1177d86d-1536-4bc2-9904-ad7565d88cff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.605415 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1177d86d-1536-4bc2-9904-ad7565d88cff-kube-api-access-6qf9t" (OuterVolumeSpecName: "kube-api-access-6qf9t") pod "1177d86d-1536-4bc2-9904-ad7565d88cff" (UID: "1177d86d-1536-4bc2-9904-ad7565d88cff"). InnerVolumeSpecName "kube-api-access-6qf9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.614281 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1177d86d-1536-4bc2-9904-ad7565d88cff" (UID: "1177d86d-1536-4bc2-9904-ad7565d88cff"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.698136 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.698163 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1177d86d-1536-4bc2-9904-ad7565d88cff-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.698173 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qf9t\" (UniqueName: \"kubernetes.io/projected/1177d86d-1536-4bc2-9904-ad7565d88cff-kube-api-access-6qf9t\") on node \"crc\" DevicePath \"\"" Dec 03 06:32:11 crc kubenswrapper[4810]: I1203 06:32:11.998306 4810 generic.go:334] "Generic (PLEG): container finished" podID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerID="6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94" exitCode=0 Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:11.999772 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qb95" event={"ID":"1177d86d-1536-4bc2-9904-ad7565d88cff","Type":"ContainerDied","Data":"6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94"} Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:11.999819 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qb95" event={"ID":"1177d86d-1536-4bc2-9904-ad7565d88cff","Type":"ContainerDied","Data":"9b4e085efea5b3026f12fcd1edee890e77e9a13584f845a3053bc2c8a7af592b"} Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:11.999847 4810 scope.go:117] "RemoveContainer" containerID="6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.000045 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7qb95" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.038286 4810 scope.go:117] "RemoveContainer" containerID="eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.039099 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qb95"] Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.051210 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qb95"] Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.064061 4810 scope.go:117] "RemoveContainer" containerID="10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.117296 4810 scope.go:117] "RemoveContainer" containerID="6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94" Dec 03 06:32:12 crc kubenswrapper[4810]: E1203 06:32:12.118481 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94\": container with ID starting with 6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94 not found: ID does not exist" containerID="6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.118543 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94"} err="failed to get container status \"6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94\": rpc error: code = NotFound desc = could not find container \"6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94\": container with ID starting with 6af3da56195ca50b3721f1c831ab8f582e8813495f17518de326441585795f94 not found: ID does not exist" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.118583 4810 scope.go:117] "RemoveContainer" containerID="eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa" Dec 03 06:32:12 crc kubenswrapper[4810]: E1203 06:32:12.119076 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa\": container with ID starting with eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa not found: ID does not exist" containerID="eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.119206 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa"} err="failed to get container status \"eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa\": rpc error: code = NotFound desc = could not find container \"eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa\": container with ID starting with eb15851e03ca56fe6327a46ed6482a593cfba18c3707005eb912be5f1bdbe7fa not found: ID does not exist" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.119307 4810 scope.go:117] "RemoveContainer" containerID="10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f" Dec 03 06:32:12 crc kubenswrapper[4810]: E1203 06:32:12.119966 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f\": container with ID starting with 10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f not found: ID does not exist" containerID="10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.120019 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f"} err="failed to get container status \"10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f\": rpc error: code = NotFound desc = could not find container \"10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f\": container with ID starting with 10e9b533184a8e55585e74594b55c259b28a80b412bd772031fd413a8e2d183f not found: ID does not exist" Dec 03 06:32:12 crc kubenswrapper[4810]: I1203 06:32:12.393960 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" path="/var/lib/kubelet/pods/1177d86d-1536-4bc2-9904-ad7565d88cff/volumes" Dec 03 06:32:22 crc kubenswrapper[4810]: I1203 06:32:22.381021 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:32:22 crc kubenswrapper[4810]: E1203 06:32:22.381861 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:32:34 crc kubenswrapper[4810]: I1203 06:32:34.377681 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:32:34 crc kubenswrapper[4810]: E1203 06:32:34.379435 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:32:46 crc kubenswrapper[4810]: I1203 06:32:46.395777 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:32:46 crc kubenswrapper[4810]: E1203 06:32:46.398038 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:33:00 crc kubenswrapper[4810]: I1203 06:33:00.377335 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:33:00 crc kubenswrapper[4810]: E1203 06:33:00.378177 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:33:14 crc kubenswrapper[4810]: I1203 06:33:14.377718 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:33:14 crc kubenswrapper[4810]: E1203 06:33:14.378683 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:33:29 crc kubenswrapper[4810]: I1203 06:33:29.377356 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:33:29 crc kubenswrapper[4810]: E1203 06:33:29.378147 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:33:40 crc kubenswrapper[4810]: I1203 06:33:40.377922 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:33:40 crc kubenswrapper[4810]: E1203 06:33:40.379109 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:33:51 crc kubenswrapper[4810]: I1203 06:33:51.377673 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:33:51 crc kubenswrapper[4810]: E1203 06:33:51.378701 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:34:06 crc kubenswrapper[4810]: I1203 06:34:06.384639 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:34:06 crc kubenswrapper[4810]: E1203 06:34:06.385826 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:34:20 crc kubenswrapper[4810]: I1203 06:34:20.377655 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:34:20 crc kubenswrapper[4810]: E1203 06:34:20.378722 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:34:34 crc kubenswrapper[4810]: I1203 06:34:34.378152 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:34:34 crc kubenswrapper[4810]: E1203 06:34:34.378975 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:34:47 crc kubenswrapper[4810]: I1203 06:34:47.378179 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:34:47 crc kubenswrapper[4810]: E1203 06:34:47.378927 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:35:02 crc kubenswrapper[4810]: I1203 06:35:02.378211 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:35:02 crc kubenswrapper[4810]: E1203 06:35:02.379492 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:35:15 crc kubenswrapper[4810]: I1203 06:35:15.377815 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:35:15 crc kubenswrapper[4810]: E1203 06:35:15.378559 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" 
podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:35:29 crc kubenswrapper[4810]: I1203 06:35:29.378769 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:35:29 crc kubenswrapper[4810]: E1203 06:35:29.379950 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:35:41 crc kubenswrapper[4810]: I1203 06:35:41.378456 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:35:41 crc kubenswrapper[4810]: E1203 06:35:41.379424 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.403376 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tksw8"] Dec 03 06:35:43 crc kubenswrapper[4810]: E1203 06:35:43.409991 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerName="registry-server" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.410027 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerName="registry-server" Dec 03 06:35:43 crc kubenswrapper[4810]: E1203 06:35:43.410066 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerName="extract-content" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.410076 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerName="extract-content" Dec 03 06:35:43 crc kubenswrapper[4810]: E1203 06:35:43.410092 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerName="extract-utilities" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.410101 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerName="extract-utilities" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.410390 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="1177d86d-1536-4bc2-9904-ad7565d88cff" containerName="registry-server" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.414964 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.424942 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tksw8"] Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.541055 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-utilities\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.541132 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfnrp\" (UniqueName: \"kubernetes.io/projected/b3e76bf7-4422-4613-adea-85e5408fef55-kube-api-access-dfnrp\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.541338 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-catalog-content\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.643848 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-catalog-content\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.643994 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-utilities\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.644078 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfnrp\" (UniqueName: \"kubernetes.io/projected/b3e76bf7-4422-4613-adea-85e5408fef55-kube-api-access-dfnrp\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.644388 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-catalog-content\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.644626 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-utilities\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.661563 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dfnrp\" (UniqueName: \"kubernetes.io/projected/b3e76bf7-4422-4613-adea-85e5408fef55-kube-api-access-dfnrp\") pod \"community-operators-tksw8\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:43 crc kubenswrapper[4810]: I1203 06:35:43.751040 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:44 crc kubenswrapper[4810]: I1203 06:35:44.277024 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tksw8"] Dec 03 06:35:44 crc kubenswrapper[4810]: I1203 06:35:44.303426 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tksw8" event={"ID":"b3e76bf7-4422-4613-adea-85e5408fef55","Type":"ContainerStarted","Data":"6c4971b3ce1c4aaf279d9fd3c3502f5d116a64bbf662acfc7b949034397535aa"} Dec 03 06:35:45 crc kubenswrapper[4810]: I1203 06:35:45.314163 4810 generic.go:334] "Generic (PLEG): container finished" podID="b3e76bf7-4422-4613-adea-85e5408fef55" containerID="ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93" exitCode=0 Dec 03 06:35:45 crc kubenswrapper[4810]: I1203 06:35:45.314416 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tksw8" event={"ID":"b3e76bf7-4422-4613-adea-85e5408fef55","Type":"ContainerDied","Data":"ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93"} Dec 03 06:35:46 crc kubenswrapper[4810]: I1203 06:35:46.326178 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tksw8" event={"ID":"b3e76bf7-4422-4613-adea-85e5408fef55","Type":"ContainerStarted","Data":"44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245"} Dec 03 06:35:47 crc kubenswrapper[4810]: I1203 06:35:47.342362 4810 generic.go:334] "Generic (PLEG): container finished" podID="b3e76bf7-4422-4613-adea-85e5408fef55" containerID="44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245" exitCode=0 Dec 03 06:35:47 crc kubenswrapper[4810]: I1203 06:35:47.342759 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tksw8" event={"ID":"b3e76bf7-4422-4613-adea-85e5408fef55","Type":"ContainerDied","Data":"44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245"} Dec 03 06:35:48 crc kubenswrapper[4810]: I1203 06:35:48.359827 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tksw8" event={"ID":"b3e76bf7-4422-4613-adea-85e5408fef55","Type":"ContainerStarted","Data":"5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71"} Dec 03 06:35:48 crc kubenswrapper[4810]: I1203 06:35:48.392111 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tksw8" podStartSLOduration=2.843327513 podStartE2EDuration="5.392078889s" podCreationTimestamp="2025-12-03 06:35:43 +0000 UTC" firstStartedPulling="2025-12-03 06:35:45.31677468 +0000 UTC m=+3269.252235521" lastFinishedPulling="2025-12-03 06:35:47.865526056 +0000 UTC m=+3271.800986897" observedRunningTime="2025-12-03 06:35:48.381795727 +0000 UTC m=+3272.317256628" watchObservedRunningTime="2025-12-03 06:35:48.392078889 +0000 UTC m=+3272.327539770" Dec 03 06:35:52 crc kubenswrapper[4810]: I1203 06:35:52.379331 4810 scope.go:117] "RemoveContainer" 
containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:35:52 crc kubenswrapper[4810]: E1203 06:35:52.380704 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:35:53 crc kubenswrapper[4810]: I1203 06:35:53.753053 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:53 crc kubenswrapper[4810]: I1203 06:35:53.753130 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:53 crc kubenswrapper[4810]: I1203 06:35:53.844614 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:54 crc kubenswrapper[4810]: I1203 06:35:54.516181 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:54 crc kubenswrapper[4810]: I1203 06:35:54.614161 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tksw8"] Dec 03 06:35:56 crc kubenswrapper[4810]: I1203 06:35:56.455540 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tksw8" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" containerName="registry-server" containerID="cri-o://5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71" gracePeriod=2 Dec 03 06:35:56 crc kubenswrapper[4810]: I1203 06:35:56.893673 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.019376 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-catalog-content\") pod \"b3e76bf7-4422-4613-adea-85e5408fef55\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.019639 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfnrp\" (UniqueName: \"kubernetes.io/projected/b3e76bf7-4422-4613-adea-85e5408fef55-kube-api-access-dfnrp\") pod \"b3e76bf7-4422-4613-adea-85e5408fef55\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.019792 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-utilities\") pod \"b3e76bf7-4422-4613-adea-85e5408fef55\" (UID: \"b3e76bf7-4422-4613-adea-85e5408fef55\") " Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.020643 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-utilities" (OuterVolumeSpecName: "utilities") pod "b3e76bf7-4422-4613-adea-85e5408fef55" (UID: "b3e76bf7-4422-4613-adea-85e5408fef55"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.025429 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3e76bf7-4422-4613-adea-85e5408fef55-kube-api-access-dfnrp" (OuterVolumeSpecName: "kube-api-access-dfnrp") pod "b3e76bf7-4422-4613-adea-85e5408fef55" (UID: "b3e76bf7-4422-4613-adea-85e5408fef55"). InnerVolumeSpecName "kube-api-access-dfnrp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.070908 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3e76bf7-4422-4613-adea-85e5408fef55" (UID: "b3e76bf7-4422-4613-adea-85e5408fef55"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.122353 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfnrp\" (UniqueName: \"kubernetes.io/projected/b3e76bf7-4422-4613-adea-85e5408fef55-kube-api-access-dfnrp\") on node \"crc\" DevicePath \"\"" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.122393 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.122405 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3e76bf7-4422-4613-adea-85e5408fef55-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.470924 4810 generic.go:334] "Generic (PLEG): container finished" podID="b3e76bf7-4422-4613-adea-85e5408fef55" containerID="5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71" exitCode=0 Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.470965 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tksw8" event={"ID":"b3e76bf7-4422-4613-adea-85e5408fef55","Type":"ContainerDied","Data":"5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71"} Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.470996 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tksw8" event={"ID":"b3e76bf7-4422-4613-adea-85e5408fef55","Type":"ContainerDied","Data":"6c4971b3ce1c4aaf279d9fd3c3502f5d116a64bbf662acfc7b949034397535aa"} Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.471019 4810 scope.go:117] "RemoveContainer" containerID="5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.471460 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tksw8" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.502351 4810 scope.go:117] "RemoveContainer" containerID="44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.541012 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tksw8"] Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.548276 4810 scope.go:117] "RemoveContainer" containerID="ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.550942 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tksw8"] Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.588142 4810 scope.go:117] "RemoveContainer" containerID="5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71" Dec 03 06:35:57 crc kubenswrapper[4810]: E1203 06:35:57.591209 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71\": container with ID starting with 5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71 not found: ID does not exist" containerID="5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.591244 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71"} err="failed to get container status \"5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71\": rpc error: code = NotFound desc = could not find container \"5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71\": container with ID starting with 5cfed73e15c05a27ae70ce9957e6aa22fae85f21737c373c6ee8c13b76539a71 not found: ID does not exist" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.591273 4810 scope.go:117] "RemoveContainer" containerID="44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245" Dec 03 06:35:57 crc kubenswrapper[4810]: E1203 06:35:57.591923 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245\": container with ID starting with 44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245 not found: ID does not exist" containerID="44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.591990 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245"} err="failed to get container status \"44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245\": rpc error: code = NotFound desc = could not find container \"44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245\": container with ID starting with 44392a301eb0310d1ca16c872bfe5391500227ea78e8beb1f87e997184be0245 not found: ID does not exist" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.592037 4810 scope.go:117] "RemoveContainer" containerID="ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93" Dec 03 06:35:57 crc kubenswrapper[4810]: E1203 06:35:57.592558 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93\": container with ID starting with ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93 not found: ID does not exist" containerID="ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93" Dec 03 06:35:57 crc kubenswrapper[4810]: I1203 06:35:57.592592 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93"} err="failed to get container status \"ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93\": rpc error: code = NotFound desc = could not find container \"ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93\": container with ID starting with ce3fa97299b387cd400c1217a0bb86959d7190187557b31b05e53c41bc990a93 not found: ID does not exist" Dec 03 06:35:58 crc kubenswrapper[4810]: I1203 06:35:58.393607 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" path="/var/lib/kubelet/pods/b3e76bf7-4422-4613-adea-85e5408fef55/volumes" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.134214 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sk886"] Dec 03 06:36:02 crc kubenswrapper[4810]: E1203 06:36:02.136881 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" containerName="registry-server" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.136914 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" containerName="registry-server" Dec 03 06:36:02 crc kubenswrapper[4810]: E1203 06:36:02.136949 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" containerName="extract-content" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.136956 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" containerName="extract-content" Dec 03 06:36:02 crc kubenswrapper[4810]: E1203 06:36:02.136965 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" containerName="extract-utilities" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.136973 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" containerName="extract-utilities" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.137314 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3e76bf7-4422-4613-adea-85e5408fef55" containerName="registry-server" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.138590 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.147100 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sk886"] Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.233261 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-catalog-content\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.233343 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nshzg\" (UniqueName: \"kubernetes.io/projected/75af9b41-42c5-4f67-8bfd-689397941e46-kube-api-access-nshzg\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.233387 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-utilities\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.334788 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-catalog-content\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.334852 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nshzg\" (UniqueName: \"kubernetes.io/projected/75af9b41-42c5-4f67-8bfd-689397941e46-kube-api-access-nshzg\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.334893 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-utilities\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.335351 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-utilities\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.335593 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-catalog-content\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.356469 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nshzg\" (UniqueName: \"kubernetes.io/projected/75af9b41-42c5-4f67-8bfd-689397941e46-kube-api-access-nshzg\") pod \"redhat-operators-sk886\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.462327 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:02 crc kubenswrapper[4810]: I1203 06:36:02.988817 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sk886"] Dec 03 06:36:03 crc kubenswrapper[4810]: I1203 06:36:03.537072 4810 generic.go:334] "Generic (PLEG): container finished" podID="75af9b41-42c5-4f67-8bfd-689397941e46" containerID="f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519" exitCode=0 Dec 03 06:36:03 crc kubenswrapper[4810]: I1203 06:36:03.537111 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sk886" event={"ID":"75af9b41-42c5-4f67-8bfd-689397941e46","Type":"ContainerDied","Data":"f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519"} Dec 03 06:36:03 crc kubenswrapper[4810]: I1203 06:36:03.537316 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sk886" event={"ID":"75af9b41-42c5-4f67-8bfd-689397941e46","Type":"ContainerStarted","Data":"770b0c622ca95de3f4231dcab6c7b9d4202cceb533a1fbee865d0ffd6f0885da"} Dec 03 06:36:04 crc kubenswrapper[4810]: I1203 06:36:04.551910 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sk886" event={"ID":"75af9b41-42c5-4f67-8bfd-689397941e46","Type":"ContainerStarted","Data":"9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982"} Dec 03 06:36:05 crc kubenswrapper[4810]: I1203 06:36:05.566122 4810 generic.go:334] "Generic (PLEG): container finished" podID="75af9b41-42c5-4f67-8bfd-689397941e46" containerID="9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982" exitCode=0 Dec 03 06:36:05 crc kubenswrapper[4810]: I1203 06:36:05.566218 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sk886" event={"ID":"75af9b41-42c5-4f67-8bfd-689397941e46","Type":"ContainerDied","Data":"9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982"} Dec 03 06:36:06 crc kubenswrapper[4810]: I1203 06:36:06.390208 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:36:06 crc kubenswrapper[4810]: E1203 06:36:06.390913 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:36:06 crc kubenswrapper[4810]: I1203 06:36:06.587348 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sk886" event={"ID":"75af9b41-42c5-4f67-8bfd-689397941e46","Type":"ContainerStarted","Data":"b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af"} Dec 03 06:36:06 crc kubenswrapper[4810]: I1203 06:36:06.620543 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-sk886" podStartSLOduration=2.214853235 podStartE2EDuration="4.620521193s" podCreationTimestamp="2025-12-03 06:36:02 +0000 UTC" firstStartedPulling="2025-12-03 06:36:03.538701632 +0000 UTC m=+3287.474162473" lastFinishedPulling="2025-12-03 06:36:05.94436955 +0000 UTC m=+3289.879830431" observedRunningTime="2025-12-03 06:36:06.609283346 +0000 UTC m=+3290.544744217" watchObservedRunningTime="2025-12-03 06:36:06.620521193 +0000 UTC m=+3290.555982044" Dec 03 06:36:12 crc kubenswrapper[4810]: I1203 06:36:12.463412 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:12 crc kubenswrapper[4810]: I1203 06:36:12.464046 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:12 crc kubenswrapper[4810]: I1203 06:36:12.528472 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:12 crc kubenswrapper[4810]: I1203 06:36:12.705242 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:12 crc kubenswrapper[4810]: I1203 06:36:12.765549 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sk886"] Dec 03 06:36:14 crc kubenswrapper[4810]: I1203 06:36:14.657769 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sk886" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" containerName="registry-server" containerID="cri-o://b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af" gracePeriod=2 Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.660610 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.680806 4810 generic.go:334] "Generic (PLEG): container finished" podID="75af9b41-42c5-4f67-8bfd-689397941e46" containerID="b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af" exitCode=0 Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.680842 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sk886" event={"ID":"75af9b41-42c5-4f67-8bfd-689397941e46","Type":"ContainerDied","Data":"b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af"} Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.680867 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sk886" event={"ID":"75af9b41-42c5-4f67-8bfd-689397941e46","Type":"ContainerDied","Data":"770b0c622ca95de3f4231dcab6c7b9d4202cceb533a1fbee865d0ffd6f0885da"} Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.680883 4810 scope.go:117] "RemoveContainer" containerID="b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.680897 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sk886" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.706250 4810 scope.go:117] "RemoveContainer" containerID="9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.734322 4810 scope.go:117] "RemoveContainer" containerID="f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.764029 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-utilities\") pod \"75af9b41-42c5-4f67-8bfd-689397941e46\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.764074 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-catalog-content\") pod \"75af9b41-42c5-4f67-8bfd-689397941e46\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.764110 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nshzg\" (UniqueName: \"kubernetes.io/projected/75af9b41-42c5-4f67-8bfd-689397941e46-kube-api-access-nshzg\") pod \"75af9b41-42c5-4f67-8bfd-689397941e46\" (UID: \"75af9b41-42c5-4f67-8bfd-689397941e46\") " Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.765117 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-utilities" (OuterVolumeSpecName: "utilities") pod "75af9b41-42c5-4f67-8bfd-689397941e46" (UID: "75af9b41-42c5-4f67-8bfd-689397941e46"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.770963 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75af9b41-42c5-4f67-8bfd-689397941e46-kube-api-access-nshzg" (OuterVolumeSpecName: "kube-api-access-nshzg") pod "75af9b41-42c5-4f67-8bfd-689397941e46" (UID: "75af9b41-42c5-4f67-8bfd-689397941e46"). InnerVolumeSpecName "kube-api-access-nshzg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.785092 4810 scope.go:117] "RemoveContainer" containerID="b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af" Dec 03 06:36:16 crc kubenswrapper[4810]: E1203 06:36:16.785520 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af\": container with ID starting with b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af not found: ID does not exist" containerID="b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.785555 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af"} err="failed to get container status \"b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af\": rpc error: code = NotFound desc = could not find container \"b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af\": container with ID starting with b520255456fbce4879b6a649223c4e07d87962d297b6ec79ef6c9baa6f4a30af not found: ID does not exist" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.785575 4810 scope.go:117] "RemoveContainer" containerID="9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982" Dec 03 06:36:16 crc kubenswrapper[4810]: E1203 06:36:16.785883 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982\": container with ID starting with 9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982 not found: ID does not exist" containerID="9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.785905 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982"} err="failed to get container status \"9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982\": rpc error: code = NotFound desc = could not find container \"9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982\": container with ID starting with 9a9b1cd0588769c7e932e590569d779bd6c73e5cadedfc0c0a59ce480ec88982 not found: ID does not exist" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.785923 4810 scope.go:117] "RemoveContainer" containerID="f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519" Dec 03 06:36:16 crc kubenswrapper[4810]: E1203 06:36:16.786144 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519\": container with ID starting with f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519 not found: ID does not exist" containerID="f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.786168 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519"} err="failed to get container status \"f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519\": rpc error: code = NotFound desc = could not 
find container \"f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519\": container with ID starting with f4358c5ab90c5f39345e93e74b49f2ca714b2d4c5c5bc8648713be9d1563a519 not found: ID does not exist" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.870432 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.870480 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nshzg\" (UniqueName: \"kubernetes.io/projected/75af9b41-42c5-4f67-8bfd-689397941e46-kube-api-access-nshzg\") on node \"crc\" DevicePath \"\"" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.886396 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "75af9b41-42c5-4f67-8bfd-689397941e46" (UID: "75af9b41-42c5-4f67-8bfd-689397941e46"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:36:16 crc kubenswrapper[4810]: I1203 06:36:16.972823 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75af9b41-42c5-4f67-8bfd-689397941e46-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:36:17 crc kubenswrapper[4810]: I1203 06:36:17.019803 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sk886"] Dec 03 06:36:17 crc kubenswrapper[4810]: I1203 06:36:17.034023 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sk886"] Dec 03 06:36:18 crc kubenswrapper[4810]: I1203 06:36:18.377581 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:36:18 crc kubenswrapper[4810]: E1203 06:36:18.378267 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:36:18 crc kubenswrapper[4810]: I1203 06:36:18.397136 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" path="/var/lib/kubelet/pods/75af9b41-42c5-4f67-8bfd-689397941e46/volumes" Dec 03 06:36:29 crc kubenswrapper[4810]: I1203 06:36:29.378459 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:36:29 crc kubenswrapper[4810]: E1203 06:36:29.379520 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:36:44 crc kubenswrapper[4810]: I1203 06:36:44.377724 4810 scope.go:117] "RemoveContainer" 
containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:36:44 crc kubenswrapper[4810]: E1203 06:36:44.378703 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:36:56 crc kubenswrapper[4810]: I1203 06:36:56.385184 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:36:57 crc kubenswrapper[4810]: I1203 06:36:57.129552 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"d5b8ceb105839fde73a38502311506f2bee626155952e9037dd27cc2ac420f8a"} Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.450397 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zzkj6"] Dec 03 06:38:29 crc kubenswrapper[4810]: E1203 06:38:29.451488 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" containerName="extract-utilities" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.451508 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" containerName="extract-utilities" Dec 03 06:38:29 crc kubenswrapper[4810]: E1203 06:38:29.451533 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" containerName="registry-server" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.451543 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" containerName="registry-server" Dec 03 06:38:29 crc kubenswrapper[4810]: E1203 06:38:29.451565 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" containerName="extract-content" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.451574 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" containerName="extract-content" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.451843 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="75af9b41-42c5-4f67-8bfd-689397941e46" containerName="registry-server" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.453793 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.486050 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zzkj6"] Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.603010 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-utilities\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.603392 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7btvj\" (UniqueName: \"kubernetes.io/projected/e0427641-5398-44cd-8285-aee6bbf53ca8-kube-api-access-7btvj\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.603666 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-catalog-content\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.706010 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7btvj\" (UniqueName: \"kubernetes.io/projected/e0427641-5398-44cd-8285-aee6bbf53ca8-kube-api-access-7btvj\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.706064 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-catalog-content\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.706190 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-utilities\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.706653 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-utilities\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.706692 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-catalog-content\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.730929 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7btvj\" (UniqueName: \"kubernetes.io/projected/e0427641-5398-44cd-8285-aee6bbf53ca8-kube-api-access-7btvj\") pod \"certified-operators-zzkj6\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:29 crc kubenswrapper[4810]: I1203 06:38:29.779376 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:30 crc kubenswrapper[4810]: I1203 06:38:30.335876 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zzkj6"] Dec 03 06:38:31 crc kubenswrapper[4810]: I1203 06:38:31.168890 4810 generic.go:334] "Generic (PLEG): container finished" podID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerID="7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e" exitCode=0 Dec 03 06:38:31 crc kubenswrapper[4810]: I1203 06:38:31.168977 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzkj6" event={"ID":"e0427641-5398-44cd-8285-aee6bbf53ca8","Type":"ContainerDied","Data":"7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e"} Dec 03 06:38:31 crc kubenswrapper[4810]: I1203 06:38:31.169255 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzkj6" event={"ID":"e0427641-5398-44cd-8285-aee6bbf53ca8","Type":"ContainerStarted","Data":"32c6e21ea026750e8502286c80d5b4b99f0c4297db3b1ca0b511fb84664b3740"} Dec 03 06:38:31 crc kubenswrapper[4810]: I1203 06:38:31.172236 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:38:32 crc kubenswrapper[4810]: I1203 06:38:32.183905 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzkj6" event={"ID":"e0427641-5398-44cd-8285-aee6bbf53ca8","Type":"ContainerStarted","Data":"a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac"} Dec 03 06:38:33 crc kubenswrapper[4810]: I1203 06:38:33.199865 4810 generic.go:334] "Generic (PLEG): container finished" podID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerID="a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac" exitCode=0 Dec 03 06:38:33 crc kubenswrapper[4810]: I1203 06:38:33.200022 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzkj6" event={"ID":"e0427641-5398-44cd-8285-aee6bbf53ca8","Type":"ContainerDied","Data":"a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac"} Dec 03 06:38:34 crc kubenswrapper[4810]: I1203 06:38:34.210721 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzkj6" event={"ID":"e0427641-5398-44cd-8285-aee6bbf53ca8","Type":"ContainerStarted","Data":"8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1"} Dec 03 06:38:34 crc kubenswrapper[4810]: I1203 06:38:34.232311 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zzkj6" podStartSLOduration=2.78086753 podStartE2EDuration="5.232294046s" podCreationTimestamp="2025-12-03 06:38:29 +0000 UTC" firstStartedPulling="2025-12-03 06:38:31.171829589 +0000 UTC m=+3435.107290470" lastFinishedPulling="2025-12-03 06:38:33.623256115 +0000 UTC m=+3437.558716986" observedRunningTime="2025-12-03 06:38:34.227305905 +0000 UTC m=+3438.162766776" watchObservedRunningTime="2025-12-03 
06:38:34.232294046 +0000 UTC m=+3438.167754887" Dec 03 06:38:39 crc kubenswrapper[4810]: I1203 06:38:39.779947 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:39 crc kubenswrapper[4810]: I1203 06:38:39.780400 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:39 crc kubenswrapper[4810]: I1203 06:38:39.850613 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:40 crc kubenswrapper[4810]: I1203 06:38:40.344973 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:40 crc kubenswrapper[4810]: I1203 06:38:40.408081 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zzkj6"] Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.283923 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zzkj6" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerName="registry-server" containerID="cri-o://8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1" gracePeriod=2 Dec 03 06:38:42 crc kubenswrapper[4810]: E1203 06:38:42.439996 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0427641_5398_44cd_8285_aee6bbf53ca8.slice/crio-8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1.scope\": RecentStats: unable to find data in memory cache]" Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.722493 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.818055 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-catalog-content\") pod \"e0427641-5398-44cd-8285-aee6bbf53ca8\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.818334 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7btvj\" (UniqueName: \"kubernetes.io/projected/e0427641-5398-44cd-8285-aee6bbf53ca8-kube-api-access-7btvj\") pod \"e0427641-5398-44cd-8285-aee6bbf53ca8\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.818406 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-utilities\") pod \"e0427641-5398-44cd-8285-aee6bbf53ca8\" (UID: \"e0427641-5398-44cd-8285-aee6bbf53ca8\") " Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.819485 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-utilities" (OuterVolumeSpecName: "utilities") pod "e0427641-5398-44cd-8285-aee6bbf53ca8" (UID: "e0427641-5398-44cd-8285-aee6bbf53ca8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.824471 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0427641-5398-44cd-8285-aee6bbf53ca8-kube-api-access-7btvj" (OuterVolumeSpecName: "kube-api-access-7btvj") pod "e0427641-5398-44cd-8285-aee6bbf53ca8" (UID: "e0427641-5398-44cd-8285-aee6bbf53ca8"). InnerVolumeSpecName "kube-api-access-7btvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.923421 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:38:42 crc kubenswrapper[4810]: I1203 06:38:42.923492 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7btvj\" (UniqueName: \"kubernetes.io/projected/e0427641-5398-44cd-8285-aee6bbf53ca8-kube-api-access-7btvj\") on node \"crc\" DevicePath \"\"" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.151819 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0427641-5398-44cd-8285-aee6bbf53ca8" (UID: "e0427641-5398-44cd-8285-aee6bbf53ca8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.228161 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0427641-5398-44cd-8285-aee6bbf53ca8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.295883 4810 generic.go:334] "Generic (PLEG): container finished" podID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerID="8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1" exitCode=0 Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.295946 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzkj6" event={"ID":"e0427641-5398-44cd-8285-aee6bbf53ca8","Type":"ContainerDied","Data":"8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1"} Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.295981 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zzkj6" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.296014 4810 scope.go:117] "RemoveContainer" containerID="8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.295999 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzkj6" event={"ID":"e0427641-5398-44cd-8285-aee6bbf53ca8","Type":"ContainerDied","Data":"32c6e21ea026750e8502286c80d5b4b99f0c4297db3b1ca0b511fb84664b3740"} Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.328332 4810 scope.go:117] "RemoveContainer" containerID="a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.335594 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zzkj6"] Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.350390 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zzkj6"] Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.373974 4810 scope.go:117] "RemoveContainer" containerID="7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.423853 4810 scope.go:117] "RemoveContainer" containerID="8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1" Dec 03 06:38:43 crc kubenswrapper[4810]: E1203 06:38:43.424397 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1\": container with ID starting with 8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1 not found: ID does not exist" containerID="8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.424422 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1"} err="failed to get container status \"8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1\": rpc error: code = NotFound desc = could not find container \"8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1\": container with ID starting with 8a0a6b2cbb102b61291c0f34a61dc2e47470b478bb416a142f4512ce01d10bd1 not found: ID does not exist" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.424442 4810 scope.go:117] "RemoveContainer" containerID="a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac" Dec 03 06:38:43 crc kubenswrapper[4810]: E1203 06:38:43.425009 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac\": container with ID starting with a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac not found: ID does not exist" containerID="a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.425031 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac"} err="failed to get container status \"a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac\": rpc error: code = NotFound desc = could not find 
container \"a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac\": container with ID starting with a02474552988fa5b1d07b40250f21f16e155e4693697db673a8c5cbc7f0b6fac not found: ID does not exist" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.425044 4810 scope.go:117] "RemoveContainer" containerID="7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e" Dec 03 06:38:43 crc kubenswrapper[4810]: E1203 06:38:43.425587 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e\": container with ID starting with 7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e not found: ID does not exist" containerID="7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e" Dec 03 06:38:43 crc kubenswrapper[4810]: I1203 06:38:43.425614 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e"} err="failed to get container status \"7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e\": rpc error: code = NotFound desc = could not find container \"7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e\": container with ID starting with 7b5ff7c4092c8802a3c43dd67c79f25f348a7883f53fa594d3030e8dc4114b8e not found: ID does not exist" Dec 03 06:38:44 crc kubenswrapper[4810]: I1203 06:38:44.392345 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" path="/var/lib/kubelet/pods/e0427641-5398-44cd-8285-aee6bbf53ca8/volumes" Dec 03 06:39:25 crc kubenswrapper[4810]: I1203 06:39:25.677034 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:39:25 crc kubenswrapper[4810]: I1203 06:39:25.677464 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:39:55 crc kubenswrapper[4810]: I1203 06:39:55.676950 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:39:55 crc kubenswrapper[4810]: I1203 06:39:55.677615 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:40:25 crc kubenswrapper[4810]: I1203 06:40:25.677640 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 
06:40:25 crc kubenswrapper[4810]: I1203 06:40:25.678146 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:40:25 crc kubenswrapper[4810]: I1203 06:40:25.678224 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:40:25 crc kubenswrapper[4810]: I1203 06:40:25.679130 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5b8ceb105839fde73a38502311506f2bee626155952e9037dd27cc2ac420f8a"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:40:25 crc kubenswrapper[4810]: I1203 06:40:25.679204 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://d5b8ceb105839fde73a38502311506f2bee626155952e9037dd27cc2ac420f8a" gracePeriod=600 Dec 03 06:40:26 crc kubenswrapper[4810]: I1203 06:40:26.421721 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="d5b8ceb105839fde73a38502311506f2bee626155952e9037dd27cc2ac420f8a" exitCode=0 Dec 03 06:40:26 crc kubenswrapper[4810]: I1203 06:40:26.421792 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"d5b8ceb105839fde73a38502311506f2bee626155952e9037dd27cc2ac420f8a"} Dec 03 06:40:26 crc kubenswrapper[4810]: I1203 06:40:26.422290 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2"} Dec 03 06:40:26 crc kubenswrapper[4810]: I1203 06:40:26.422316 4810 scope.go:117] "RemoveContainer" containerID="72de17b81712ced903c6f690677e63e8105d1ed550d5fd420fea7ec353cedf2d" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.076937 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zl67v"] Dec 03 06:42:20 crc kubenswrapper[4810]: E1203 06:42:20.078507 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerName="registry-server" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.078533 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerName="registry-server" Dec 03 06:42:20 crc kubenswrapper[4810]: E1203 06:42:20.078566 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerName="extract-utilities" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.078584 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerName="extract-utilities" Dec 03 06:42:20 crc kubenswrapper[4810]: E1203 
06:42:20.078624 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerName="extract-content" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.078640 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerName="extract-content" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.079079 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0427641-5398-44cd-8285-aee6bbf53ca8" containerName="registry-server" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.082144 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.103997 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl67v"] Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.244850 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6fdt\" (UniqueName: \"kubernetes.io/projected/b67b238c-667a-4fda-a6c7-57fbac3097e1-kube-api-access-p6fdt\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.244902 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-catalog-content\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.244935 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-utilities\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.347035 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6fdt\" (UniqueName: \"kubernetes.io/projected/b67b238c-667a-4fda-a6c7-57fbac3097e1-kube-api-access-p6fdt\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.347086 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-catalog-content\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.347112 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-utilities\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.347709 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-utilities\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.348375 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-catalog-content\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.372003 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6fdt\" (UniqueName: \"kubernetes.io/projected/b67b238c-667a-4fda-a6c7-57fbac3097e1-kube-api-access-p6fdt\") pod \"redhat-marketplace-zl67v\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.408863 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:20 crc kubenswrapper[4810]: I1203 06:42:20.880784 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl67v"] Dec 03 06:42:21 crc kubenswrapper[4810]: I1203 06:42:21.733860 4810 generic.go:334] "Generic (PLEG): container finished" podID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerID="238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d" exitCode=0 Dec 03 06:42:21 crc kubenswrapper[4810]: I1203 06:42:21.734427 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl67v" event={"ID":"b67b238c-667a-4fda-a6c7-57fbac3097e1","Type":"ContainerDied","Data":"238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d"} Dec 03 06:42:21 crc kubenswrapper[4810]: I1203 06:42:21.734464 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl67v" event={"ID":"b67b238c-667a-4fda-a6c7-57fbac3097e1","Type":"ContainerStarted","Data":"593440f4fad8d9049481da663a2c25682eef5577e12118e31fc37a6bceb09751"} Dec 03 06:42:23 crc kubenswrapper[4810]: I1203 06:42:23.760265 4810 generic.go:334] "Generic (PLEG): container finished" podID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerID="c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64" exitCode=0 Dec 03 06:42:23 crc kubenswrapper[4810]: I1203 06:42:23.760400 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl67v" event={"ID":"b67b238c-667a-4fda-a6c7-57fbac3097e1","Type":"ContainerDied","Data":"c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64"} Dec 03 06:42:24 crc kubenswrapper[4810]: I1203 06:42:24.774928 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl67v" event={"ID":"b67b238c-667a-4fda-a6c7-57fbac3097e1","Type":"ContainerStarted","Data":"9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18"} Dec 03 06:42:24 crc kubenswrapper[4810]: I1203 06:42:24.807923 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zl67v" podStartSLOduration=2.384884443 podStartE2EDuration="4.807896061s" podCreationTimestamp="2025-12-03 06:42:20 +0000 UTC" firstStartedPulling="2025-12-03 06:42:21.738147307 +0000 UTC m=+3665.673608188" 
lastFinishedPulling="2025-12-03 06:42:24.161158965 +0000 UTC m=+3668.096619806" observedRunningTime="2025-12-03 06:42:24.805395635 +0000 UTC m=+3668.740856506" watchObservedRunningTime="2025-12-03 06:42:24.807896061 +0000 UTC m=+3668.743356932" Dec 03 06:42:30 crc kubenswrapper[4810]: I1203 06:42:30.414144 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:30 crc kubenswrapper[4810]: I1203 06:42:30.415133 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:30 crc kubenswrapper[4810]: I1203 06:42:30.485722 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:30 crc kubenswrapper[4810]: I1203 06:42:30.908646 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:30 crc kubenswrapper[4810]: I1203 06:42:30.974023 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl67v"] Dec 03 06:42:32 crc kubenswrapper[4810]: I1203 06:42:32.859317 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zl67v" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerName="registry-server" containerID="cri-o://9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18" gracePeriod=2 Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.395569 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.513330 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6fdt\" (UniqueName: \"kubernetes.io/projected/b67b238c-667a-4fda-a6c7-57fbac3097e1-kube-api-access-p6fdt\") pod \"b67b238c-667a-4fda-a6c7-57fbac3097e1\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.513666 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-utilities\") pod \"b67b238c-667a-4fda-a6c7-57fbac3097e1\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.513828 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-catalog-content\") pod \"b67b238c-667a-4fda-a6c7-57fbac3097e1\" (UID: \"b67b238c-667a-4fda-a6c7-57fbac3097e1\") " Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.514399 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-utilities" (OuterVolumeSpecName: "utilities") pod "b67b238c-667a-4fda-a6c7-57fbac3097e1" (UID: "b67b238c-667a-4fda-a6c7-57fbac3097e1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.526265 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b67b238c-667a-4fda-a6c7-57fbac3097e1-kube-api-access-p6fdt" (OuterVolumeSpecName: "kube-api-access-p6fdt") pod "b67b238c-667a-4fda-a6c7-57fbac3097e1" (UID: "b67b238c-667a-4fda-a6c7-57fbac3097e1"). InnerVolumeSpecName "kube-api-access-p6fdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.536181 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b67b238c-667a-4fda-a6c7-57fbac3097e1" (UID: "b67b238c-667a-4fda-a6c7-57fbac3097e1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.616130 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6fdt\" (UniqueName: \"kubernetes.io/projected/b67b238c-667a-4fda-a6c7-57fbac3097e1-kube-api-access-p6fdt\") on node \"crc\" DevicePath \"\"" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.616200 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.616229 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67b238c-667a-4fda-a6c7-57fbac3097e1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.874044 4810 generic.go:334] "Generic (PLEG): container finished" podID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerID="9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18" exitCode=0 Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.874129 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zl67v" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.874166 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl67v" event={"ID":"b67b238c-667a-4fda-a6c7-57fbac3097e1","Type":"ContainerDied","Data":"9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18"} Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.875529 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zl67v" event={"ID":"b67b238c-667a-4fda-a6c7-57fbac3097e1","Type":"ContainerDied","Data":"593440f4fad8d9049481da663a2c25682eef5577e12118e31fc37a6bceb09751"} Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.875569 4810 scope.go:117] "RemoveContainer" containerID="9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.907823 4810 scope.go:117] "RemoveContainer" containerID="c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.919860 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl67v"] Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.932414 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zl67v"] Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.948206 4810 scope.go:117] "RemoveContainer" containerID="238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.980063 4810 scope.go:117] "RemoveContainer" containerID="9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18" Dec 03 06:42:33 crc kubenswrapper[4810]: E1203 06:42:33.980486 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18\": container with ID starting with 9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18 not found: ID does not exist" containerID="9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.980527 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18"} err="failed to get container status \"9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18\": rpc error: code = NotFound desc = could not find container \"9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18\": container with ID starting with 9fa0083c005aef0cdf7da3c38e566b86433b02bee0ecd08d9109ab0d1230ef18 not found: ID does not exist" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.980547 4810 scope.go:117] "RemoveContainer" containerID="c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64" Dec 03 06:42:33 crc kubenswrapper[4810]: E1203 06:42:33.980878 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64\": container with ID starting with c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64 not found: ID does not exist" containerID="c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.980897 4810 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64"} err="failed to get container status \"c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64\": rpc error: code = NotFound desc = could not find container \"c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64\": container with ID starting with c312833cd3687766edb17859c434ce6c528a96800bdca50fc5485327bfac5f64 not found: ID does not exist" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.980921 4810 scope.go:117] "RemoveContainer" containerID="238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d" Dec 03 06:42:33 crc kubenswrapper[4810]: E1203 06:42:33.981143 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d\": container with ID starting with 238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d not found: ID does not exist" containerID="238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d" Dec 03 06:42:33 crc kubenswrapper[4810]: I1203 06:42:33.981163 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d"} err="failed to get container status \"238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d\": rpc error: code = NotFound desc = could not find container \"238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d\": container with ID starting with 238e17ead2a2fca3ec9b87f984b92766a676dfa97df84f426c7a7dd2fcf6917d not found: ID does not exist" Dec 03 06:42:34 crc kubenswrapper[4810]: I1203 06:42:34.393300 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" path="/var/lib/kubelet/pods/b67b238c-667a-4fda-a6c7-57fbac3097e1/volumes" Dec 03 06:42:55 crc kubenswrapper[4810]: I1203 06:42:55.677822 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:42:55 crc kubenswrapper[4810]: I1203 06:42:55.678382 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:43:25 crc kubenswrapper[4810]: I1203 06:43:25.676807 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:43:25 crc kubenswrapper[4810]: I1203 06:43:25.677387 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:43:55 crc kubenswrapper[4810]: I1203 
06:43:55.677959 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:43:55 crc kubenswrapper[4810]: I1203 06:43:55.678503 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:43:55 crc kubenswrapper[4810]: I1203 06:43:55.678569 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:43:55 crc kubenswrapper[4810]: I1203 06:43:55.679294 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:43:55 crc kubenswrapper[4810]: I1203 06:43:55.679377 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" gracePeriod=600 Dec 03 06:43:55 crc kubenswrapper[4810]: E1203 06:43:55.822791 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:43:56 crc kubenswrapper[4810]: I1203 06:43:56.742998 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" exitCode=0 Dec 03 06:43:56 crc kubenswrapper[4810]: I1203 06:43:56.743085 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2"} Dec 03 06:43:56 crc kubenswrapper[4810]: I1203 06:43:56.743532 4810 scope.go:117] "RemoveContainer" containerID="d5b8ceb105839fde73a38502311506f2bee626155952e9037dd27cc2ac420f8a" Dec 03 06:43:56 crc kubenswrapper[4810]: I1203 06:43:56.744634 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:43:56 crc kubenswrapper[4810]: E1203 06:43:56.745183 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:44:10 crc kubenswrapper[4810]: I1203 06:44:10.378451 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:44:10 crc kubenswrapper[4810]: E1203 06:44:10.379169 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:44:24 crc kubenswrapper[4810]: I1203 06:44:24.378690 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:44:24 crc kubenswrapper[4810]: E1203 06:44:24.379977 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:44:37 crc kubenswrapper[4810]: I1203 06:44:37.378138 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:44:37 crc kubenswrapper[4810]: E1203 06:44:37.379885 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:44:48 crc kubenswrapper[4810]: I1203 06:44:48.379280 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:44:48 crc kubenswrapper[4810]: E1203 06:44:48.379987 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:44:59 crc kubenswrapper[4810]: I1203 06:44:59.378022 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:44:59 crc kubenswrapper[4810]: E1203 06:44:59.378714 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.186023 4810 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4"] Dec 03 06:45:00 crc kubenswrapper[4810]: E1203 06:45:00.186553 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerName="extract-utilities" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.186602 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerName="extract-utilities" Dec 03 06:45:00 crc kubenswrapper[4810]: E1203 06:45:00.186625 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerName="registry-server" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.186633 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerName="registry-server" Dec 03 06:45:00 crc kubenswrapper[4810]: E1203 06:45:00.186646 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerName="extract-content" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.186654 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerName="extract-content" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.186984 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b67b238c-667a-4fda-a6c7-57fbac3097e1" containerName="registry-server" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.188047 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.193530 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.193827 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.204713 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4"] Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.294595 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfclg\" (UniqueName: \"kubernetes.io/projected/d3747dff-135b-4959-99a4-e0665a252f60-kube-api-access-qfclg\") pod \"collect-profiles-29412405-d7xj4\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.294963 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d3747dff-135b-4959-99a4-e0665a252f60-config-volume\") pod \"collect-profiles-29412405-d7xj4\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.295112 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d3747dff-135b-4959-99a4-e0665a252f60-secret-volume\") pod \"collect-profiles-29412405-d7xj4\" (UID: 
\"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.396802 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfclg\" (UniqueName: \"kubernetes.io/projected/d3747dff-135b-4959-99a4-e0665a252f60-kube-api-access-qfclg\") pod \"collect-profiles-29412405-d7xj4\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.397408 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d3747dff-135b-4959-99a4-e0665a252f60-config-volume\") pod \"collect-profiles-29412405-d7xj4\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.397534 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d3747dff-135b-4959-99a4-e0665a252f60-secret-volume\") pod \"collect-profiles-29412405-d7xj4\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.399048 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d3747dff-135b-4959-99a4-e0665a252f60-config-volume\") pod \"collect-profiles-29412405-d7xj4\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.407379 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d3747dff-135b-4959-99a4-e0665a252f60-secret-volume\") pod \"collect-profiles-29412405-d7xj4\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.416420 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfclg\" (UniqueName: \"kubernetes.io/projected/d3747dff-135b-4959-99a4-e0665a252f60-kube-api-access-qfclg\") pod \"collect-profiles-29412405-d7xj4\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:00 crc kubenswrapper[4810]: I1203 06:45:00.519173 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:01 crc kubenswrapper[4810]: I1203 06:45:01.017618 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4"] Dec 03 06:45:01 crc kubenswrapper[4810]: W1203 06:45:01.020441 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3747dff_135b_4959_99a4_e0665a252f60.slice/crio-4b60b5193cd86b246067e99942e7a778925b6ddea42153c5ee9ab2f9dff63911 WatchSource:0}: Error finding container 4b60b5193cd86b246067e99942e7a778925b6ddea42153c5ee9ab2f9dff63911: Status 404 returned error can't find the container with id 4b60b5193cd86b246067e99942e7a778925b6ddea42153c5ee9ab2f9dff63911 Dec 03 06:45:01 crc kubenswrapper[4810]: I1203 06:45:01.619553 4810 generic.go:334] "Generic (PLEG): container finished" podID="d3747dff-135b-4959-99a4-e0665a252f60" containerID="0e0a665abdcbe6248fb637ffc03109d569b510641c442783eb00acc67559f58c" exitCode=0 Dec 03 06:45:01 crc kubenswrapper[4810]: I1203 06:45:01.619806 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" event={"ID":"d3747dff-135b-4959-99a4-e0665a252f60","Type":"ContainerDied","Data":"0e0a665abdcbe6248fb637ffc03109d569b510641c442783eb00acc67559f58c"} Dec 03 06:45:01 crc kubenswrapper[4810]: I1203 06:45:01.619876 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" event={"ID":"d3747dff-135b-4959-99a4-e0665a252f60","Type":"ContainerStarted","Data":"4b60b5193cd86b246067e99942e7a778925b6ddea42153c5ee9ab2f9dff63911"} Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.003423 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.050339 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfclg\" (UniqueName: \"kubernetes.io/projected/d3747dff-135b-4959-99a4-e0665a252f60-kube-api-access-qfclg\") pod \"d3747dff-135b-4959-99a4-e0665a252f60\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.050426 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d3747dff-135b-4959-99a4-e0665a252f60-config-volume\") pod \"d3747dff-135b-4959-99a4-e0665a252f60\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.050538 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d3747dff-135b-4959-99a4-e0665a252f60-secret-volume\") pod \"d3747dff-135b-4959-99a4-e0665a252f60\" (UID: \"d3747dff-135b-4959-99a4-e0665a252f60\") " Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.051151 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3747dff-135b-4959-99a4-e0665a252f60-config-volume" (OuterVolumeSpecName: "config-volume") pod "d3747dff-135b-4959-99a4-e0665a252f60" (UID: "d3747dff-135b-4959-99a4-e0665a252f60"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.056263 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3747dff-135b-4959-99a4-e0665a252f60-kube-api-access-qfclg" (OuterVolumeSpecName: "kube-api-access-qfclg") pod "d3747dff-135b-4959-99a4-e0665a252f60" (UID: "d3747dff-135b-4959-99a4-e0665a252f60"). InnerVolumeSpecName "kube-api-access-qfclg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.056405 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3747dff-135b-4959-99a4-e0665a252f60-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d3747dff-135b-4959-99a4-e0665a252f60" (UID: "d3747dff-135b-4959-99a4-e0665a252f60"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.152411 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfclg\" (UniqueName: \"kubernetes.io/projected/d3747dff-135b-4959-99a4-e0665a252f60-kube-api-access-qfclg\") on node \"crc\" DevicePath \"\"" Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.152440 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d3747dff-135b-4959-99a4-e0665a252f60-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.152449 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d3747dff-135b-4959-99a4-e0665a252f60-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.643991 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" event={"ID":"d3747dff-135b-4959-99a4-e0665a252f60","Type":"ContainerDied","Data":"4b60b5193cd86b246067e99942e7a778925b6ddea42153c5ee9ab2f9dff63911"} Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.644342 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b60b5193cd86b246067e99942e7a778925b6ddea42153c5ee9ab2f9dff63911" Dec 03 06:45:03 crc kubenswrapper[4810]: I1203 06:45:03.644068 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412405-d7xj4" Dec 03 06:45:04 crc kubenswrapper[4810]: I1203 06:45:04.113541 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79"] Dec 03 06:45:04 crc kubenswrapper[4810]: I1203 06:45:04.123598 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412360-vnf79"] Dec 03 06:45:04 crc kubenswrapper[4810]: I1203 06:45:04.411805 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fd6f915-b856-4492-bcf7-11d93ac2f696" path="/var/lib/kubelet/pods/6fd6f915-b856-4492-bcf7-11d93ac2f696/volumes" Dec 03 06:45:11 crc kubenswrapper[4810]: I1203 06:45:11.392933 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:45:11 crc kubenswrapper[4810]: E1203 06:45:11.394378 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:45:25 crc kubenswrapper[4810]: I1203 06:45:25.377350 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:45:25 crc kubenswrapper[4810]: E1203 06:45:25.378337 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:45:30 crc kubenswrapper[4810]: I1203 06:45:30.248629 4810 scope.go:117] "RemoveContainer" containerID="794895541d9a21e66ee930bf3177b97d32372698615d953311fdceb8f1df6819" Dec 03 06:45:39 crc kubenswrapper[4810]: I1203 06:45:39.379172 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:45:39 crc kubenswrapper[4810]: E1203 06:45:39.380378 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.535942 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sssz6"] Dec 03 06:45:44 crc kubenswrapper[4810]: E1203 06:45:44.536934 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3747dff-135b-4959-99a4-e0665a252f60" containerName="collect-profiles" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.536948 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3747dff-135b-4959-99a4-e0665a252f60" containerName="collect-profiles" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 
06:45:44.539906 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3747dff-135b-4959-99a4-e0665a252f60" containerName="collect-profiles" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.541374 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.563426 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sssz6"] Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.679869 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnsbb\" (UniqueName: \"kubernetes.io/projected/fe52a321-94d8-464b-8fd4-9c7538db9826-kube-api-access-jnsbb\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.679975 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-utilities\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.680020 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-catalog-content\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.781781 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnsbb\" (UniqueName: \"kubernetes.io/projected/fe52a321-94d8-464b-8fd4-9c7538db9826-kube-api-access-jnsbb\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.782095 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-utilities\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.782182 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-catalog-content\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.782721 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-catalog-content\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.783004 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-utilities\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.811194 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnsbb\" (UniqueName: \"kubernetes.io/projected/fe52a321-94d8-464b-8fd4-9c7538db9826-kube-api-access-jnsbb\") pod \"community-operators-sssz6\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:44 crc kubenswrapper[4810]: I1203 06:45:44.910758 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:45 crc kubenswrapper[4810]: I1203 06:45:45.395058 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sssz6"] Dec 03 06:45:46 crc kubenswrapper[4810]: I1203 06:45:46.186521 4810 generic.go:334] "Generic (PLEG): container finished" podID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerID="8aa61b8b2b2cb4ee9dc8d50bbe76d8c6d266577e44b92c66121282fcb99e041b" exitCode=0 Dec 03 06:45:46 crc kubenswrapper[4810]: I1203 06:45:46.186597 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sssz6" event={"ID":"fe52a321-94d8-464b-8fd4-9c7538db9826","Type":"ContainerDied","Data":"8aa61b8b2b2cb4ee9dc8d50bbe76d8c6d266577e44b92c66121282fcb99e041b"} Dec 03 06:45:46 crc kubenswrapper[4810]: I1203 06:45:46.187500 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sssz6" event={"ID":"fe52a321-94d8-464b-8fd4-9c7538db9826","Type":"ContainerStarted","Data":"132d8854ad314e424b56be699815b55495acd4e376f6b793e1aee8bb20912f8f"} Dec 03 06:45:46 crc kubenswrapper[4810]: I1203 06:45:46.193515 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:45:47 crc kubenswrapper[4810]: I1203 06:45:47.198782 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sssz6" event={"ID":"fe52a321-94d8-464b-8fd4-9c7538db9826","Type":"ContainerStarted","Data":"466ffbba40be0eb08a8244316611a29a0eac6ea5052b94f5565fe3a709345291"} Dec 03 06:45:48 crc kubenswrapper[4810]: I1203 06:45:48.210106 4810 generic.go:334] "Generic (PLEG): container finished" podID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerID="466ffbba40be0eb08a8244316611a29a0eac6ea5052b94f5565fe3a709345291" exitCode=0 Dec 03 06:45:48 crc kubenswrapper[4810]: I1203 06:45:48.210219 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sssz6" event={"ID":"fe52a321-94d8-464b-8fd4-9c7538db9826","Type":"ContainerDied","Data":"466ffbba40be0eb08a8244316611a29a0eac6ea5052b94f5565fe3a709345291"} Dec 03 06:45:49 crc kubenswrapper[4810]: I1203 06:45:49.222413 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sssz6" event={"ID":"fe52a321-94d8-464b-8fd4-9c7538db9826","Type":"ContainerStarted","Data":"7c40b8b21f1d3fbb01036246c95e7727e5174b02ea6ea5ff2efe1f6280f30e9f"} Dec 03 06:45:49 crc kubenswrapper[4810]: I1203 06:45:49.258727 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sssz6" podStartSLOduration=2.856533951 podStartE2EDuration="5.258705997s" 
podCreationTimestamp="2025-12-03 06:45:44 +0000 UTC" firstStartedPulling="2025-12-03 06:45:46.193086693 +0000 UTC m=+3870.128547574" lastFinishedPulling="2025-12-03 06:45:48.595258759 +0000 UTC m=+3872.530719620" observedRunningTime="2025-12-03 06:45:49.246795663 +0000 UTC m=+3873.182256544" watchObservedRunningTime="2025-12-03 06:45:49.258705997 +0000 UTC m=+3873.194166848" Dec 03 06:45:52 crc kubenswrapper[4810]: I1203 06:45:52.378016 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:45:52 crc kubenswrapper[4810]: E1203 06:45:52.378998 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:45:54 crc kubenswrapper[4810]: I1203 06:45:54.912088 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:54 crc kubenswrapper[4810]: I1203 06:45:54.912981 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:54 crc kubenswrapper[4810]: I1203 06:45:54.989788 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:55 crc kubenswrapper[4810]: I1203 06:45:55.407624 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:55 crc kubenswrapper[4810]: I1203 06:45:55.471226 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sssz6"] Dec 03 06:45:57 crc kubenswrapper[4810]: I1203 06:45:57.307304 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sssz6" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerName="registry-server" containerID="cri-o://7c40b8b21f1d3fbb01036246c95e7727e5174b02ea6ea5ff2efe1f6280f30e9f" gracePeriod=2 Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.318928 4810 generic.go:334] "Generic (PLEG): container finished" podID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerID="7c40b8b21f1d3fbb01036246c95e7727e5174b02ea6ea5ff2efe1f6280f30e9f" exitCode=0 Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.319267 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sssz6" event={"ID":"fe52a321-94d8-464b-8fd4-9c7538db9826","Type":"ContainerDied","Data":"7c40b8b21f1d3fbb01036246c95e7727e5174b02ea6ea5ff2efe1f6280f30e9f"} Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.319299 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sssz6" event={"ID":"fe52a321-94d8-464b-8fd4-9c7538db9826","Type":"ContainerDied","Data":"132d8854ad314e424b56be699815b55495acd4e376f6b793e1aee8bb20912f8f"} Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.319316 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="132d8854ad314e424b56be699815b55495acd4e376f6b793e1aee8bb20912f8f" Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.349251 4810 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.477606 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-utilities\") pod \"fe52a321-94d8-464b-8fd4-9c7538db9826\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.477844 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-catalog-content\") pod \"fe52a321-94d8-464b-8fd4-9c7538db9826\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.478015 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnsbb\" (UniqueName: \"kubernetes.io/projected/fe52a321-94d8-464b-8fd4-9c7538db9826-kube-api-access-jnsbb\") pod \"fe52a321-94d8-464b-8fd4-9c7538db9826\" (UID: \"fe52a321-94d8-464b-8fd4-9c7538db9826\") " Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.478966 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-utilities" (OuterVolumeSpecName: "utilities") pod "fe52a321-94d8-464b-8fd4-9c7538db9826" (UID: "fe52a321-94d8-464b-8fd4-9c7538db9826"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.496522 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe52a321-94d8-464b-8fd4-9c7538db9826-kube-api-access-jnsbb" (OuterVolumeSpecName: "kube-api-access-jnsbb") pod "fe52a321-94d8-464b-8fd4-9c7538db9826" (UID: "fe52a321-94d8-464b-8fd4-9c7538db9826"). InnerVolumeSpecName "kube-api-access-jnsbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.537669 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe52a321-94d8-464b-8fd4-9c7538db9826" (UID: "fe52a321-94d8-464b-8fd4-9c7538db9826"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.580160 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnsbb\" (UniqueName: \"kubernetes.io/projected/fe52a321-94d8-464b-8fd4-9c7538db9826-kube-api-access-jnsbb\") on node \"crc\" DevicePath \"\"" Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.580191 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:45:58 crc kubenswrapper[4810]: I1203 06:45:58.580202 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe52a321-94d8-464b-8fd4-9c7538db9826-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:45:59 crc kubenswrapper[4810]: I1203 06:45:59.331489 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sssz6" Dec 03 06:45:59 crc kubenswrapper[4810]: I1203 06:45:59.408375 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sssz6"] Dec 03 06:45:59 crc kubenswrapper[4810]: I1203 06:45:59.420989 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sssz6"] Dec 03 06:46:00 crc kubenswrapper[4810]: I1203 06:46:00.396645 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" path="/var/lib/kubelet/pods/fe52a321-94d8-464b-8fd4-9c7538db9826/volumes" Dec 03 06:46:05 crc kubenswrapper[4810]: I1203 06:46:05.378539 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:46:05 crc kubenswrapper[4810]: E1203 06:46:05.379868 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:46:19 crc kubenswrapper[4810]: I1203 06:46:19.377446 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:46:19 crc kubenswrapper[4810]: E1203 06:46:19.378241 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.432887 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gpfgq"] Dec 03 06:46:26 crc kubenswrapper[4810]: E1203 06:46:26.434207 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerName="extract-content" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.434236 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerName="extract-content" Dec 03 06:46:26 crc kubenswrapper[4810]: E1203 06:46:26.434303 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerName="registry-server" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.434322 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerName="registry-server" Dec 03 06:46:26 crc kubenswrapper[4810]: E1203 06:46:26.434353 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerName="extract-utilities" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.434371 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" containerName="extract-utilities" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.434782 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe52a321-94d8-464b-8fd4-9c7538db9826" 
containerName="registry-server" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.437538 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.446540 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gpfgq"] Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.517434 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-catalog-content\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.518654 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccmq8\" (UniqueName: \"kubernetes.io/projected/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-kube-api-access-ccmq8\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.518804 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-utilities\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.620709 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-catalog-content\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.620861 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccmq8\" (UniqueName: \"kubernetes.io/projected/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-kube-api-access-ccmq8\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.620898 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-utilities\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.621138 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-catalog-content\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.621268 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-utilities\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 
03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.640051 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccmq8\" (UniqueName: \"kubernetes.io/projected/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-kube-api-access-ccmq8\") pod \"redhat-operators-gpfgq\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:26 crc kubenswrapper[4810]: I1203 06:46:26.812461 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:27 crc kubenswrapper[4810]: I1203 06:46:27.287299 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gpfgq"] Dec 03 06:46:27 crc kubenswrapper[4810]: I1203 06:46:27.690876 4810 generic.go:334] "Generic (PLEG): container finished" podID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerID="21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03" exitCode=0 Dec 03 06:46:27 crc kubenswrapper[4810]: I1203 06:46:27.690928 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gpfgq" event={"ID":"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225","Type":"ContainerDied","Data":"21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03"} Dec 03 06:46:27 crc kubenswrapper[4810]: I1203 06:46:27.691154 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gpfgq" event={"ID":"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225","Type":"ContainerStarted","Data":"6927fc0d839f2d86e8604564c3aa8b4bd106fa3902e3e959847351f96e4a36de"} Dec 03 06:46:28 crc kubenswrapper[4810]: I1203 06:46:28.709165 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gpfgq" event={"ID":"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225","Type":"ContainerStarted","Data":"e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06"} Dec 03 06:46:29 crc kubenswrapper[4810]: I1203 06:46:29.738071 4810 generic.go:334] "Generic (PLEG): container finished" podID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerID="e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06" exitCode=0 Dec 03 06:46:29 crc kubenswrapper[4810]: I1203 06:46:29.738113 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gpfgq" event={"ID":"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225","Type":"ContainerDied","Data":"e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06"} Dec 03 06:46:31 crc kubenswrapper[4810]: I1203 06:46:31.756526 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gpfgq" event={"ID":"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225","Type":"ContainerStarted","Data":"653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8"} Dec 03 06:46:31 crc kubenswrapper[4810]: I1203 06:46:31.783052 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gpfgq" podStartSLOduration=3.069647737 podStartE2EDuration="5.783034418s" podCreationTimestamp="2025-12-03 06:46:26 +0000 UTC" firstStartedPulling="2025-12-03 06:46:27.697975662 +0000 UTC m=+3911.633436503" lastFinishedPulling="2025-12-03 06:46:30.411362343 +0000 UTC m=+3914.346823184" observedRunningTime="2025-12-03 06:46:31.775818847 +0000 UTC m=+3915.711279688" watchObservedRunningTime="2025-12-03 06:46:31.783034418 +0000 UTC m=+3915.718495259" Dec 03 06:46:33 crc kubenswrapper[4810]: I1203 
06:46:33.378950 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:46:33 crc kubenswrapper[4810]: E1203 06:46:33.379802 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:46:36 crc kubenswrapper[4810]: I1203 06:46:36.813560 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:36 crc kubenswrapper[4810]: I1203 06:46:36.814469 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:37 crc kubenswrapper[4810]: I1203 06:46:37.859280 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gpfgq" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="registry-server" probeResult="failure" output=< Dec 03 06:46:37 crc kubenswrapper[4810]: timeout: failed to connect service ":50051" within 1s Dec 03 06:46:37 crc kubenswrapper[4810]: > Dec 03 06:46:45 crc kubenswrapper[4810]: I1203 06:46:45.377668 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:46:45 crc kubenswrapper[4810]: E1203 06:46:45.378979 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:46:46 crc kubenswrapper[4810]: I1203 06:46:46.891209 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:46 crc kubenswrapper[4810]: I1203 06:46:46.982594 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:47 crc kubenswrapper[4810]: I1203 06:46:47.140434 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gpfgq"] Dec 03 06:46:47 crc kubenswrapper[4810]: I1203 06:46:47.935700 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gpfgq" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="registry-server" containerID="cri-o://653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8" gracePeriod=2 Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.400778 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.482710 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-utilities\") pod \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.482832 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-catalog-content\") pod \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.482899 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccmq8\" (UniqueName: \"kubernetes.io/projected/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-kube-api-access-ccmq8\") pod \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\" (UID: \"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225\") " Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.483558 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-utilities" (OuterVolumeSpecName: "utilities") pod "6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" (UID: "6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.492772 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-kube-api-access-ccmq8" (OuterVolumeSpecName: "kube-api-access-ccmq8") pod "6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" (UID: "6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225"). InnerVolumeSpecName "kube-api-access-ccmq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.586059 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.586109 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccmq8\" (UniqueName: \"kubernetes.io/projected/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-kube-api-access-ccmq8\") on node \"crc\" DevicePath \"\"" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.587355 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" (UID: "6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.688008 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.947861 4810 generic.go:334] "Generic (PLEG): container finished" podID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerID="653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8" exitCode=0 Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.947914 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gpfgq" event={"ID":"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225","Type":"ContainerDied","Data":"653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8"} Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.947947 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gpfgq" event={"ID":"6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225","Type":"ContainerDied","Data":"6927fc0d839f2d86e8604564c3aa8b4bd106fa3902e3e959847351f96e4a36de"} Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.947968 4810 scope.go:117] "RemoveContainer" containerID="653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.948666 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gpfgq" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.988683 4810 scope.go:117] "RemoveContainer" containerID="e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06" Dec 03 06:46:48 crc kubenswrapper[4810]: I1203 06:46:48.997870 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gpfgq"] Dec 03 06:46:49 crc kubenswrapper[4810]: I1203 06:46:49.008483 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gpfgq"] Dec 03 06:46:49 crc kubenswrapper[4810]: I1203 06:46:49.016653 4810 scope.go:117] "RemoveContainer" containerID="21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03" Dec 03 06:46:49 crc kubenswrapper[4810]: I1203 06:46:49.092541 4810 scope.go:117] "RemoveContainer" containerID="653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8" Dec 03 06:46:49 crc kubenswrapper[4810]: E1203 06:46:49.093162 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8\": container with ID starting with 653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8 not found: ID does not exist" containerID="653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8" Dec 03 06:46:49 crc kubenswrapper[4810]: I1203 06:46:49.093255 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8"} err="failed to get container status \"653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8\": rpc error: code = NotFound desc = could not find container \"653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8\": container with ID starting with 653d183e273af480c1298b95bef1fcf9ededd5d17a34dbbfa07a20dad6c1c5f8 not found: ID does not exist" Dec 03 06:46:49 crc 
kubenswrapper[4810]: I1203 06:46:49.093315 4810 scope.go:117] "RemoveContainer" containerID="e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06" Dec 03 06:46:49 crc kubenswrapper[4810]: E1203 06:46:49.093694 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06\": container with ID starting with e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06 not found: ID does not exist" containerID="e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06" Dec 03 06:46:49 crc kubenswrapper[4810]: I1203 06:46:49.093852 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06"} err="failed to get container status \"e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06\": rpc error: code = NotFound desc = could not find container \"e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06\": container with ID starting with e1c3312bf3a36323132d7e9546457e79dc86de12cf9f44e94db52fd427d97a06 not found: ID does not exist" Dec 03 06:46:49 crc kubenswrapper[4810]: I1203 06:46:49.093964 4810 scope.go:117] "RemoveContainer" containerID="21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03" Dec 03 06:46:49 crc kubenswrapper[4810]: E1203 06:46:49.094318 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03\": container with ID starting with 21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03 not found: ID does not exist" containerID="21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03" Dec 03 06:46:49 crc kubenswrapper[4810]: I1203 06:46:49.094350 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03"} err="failed to get container status \"21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03\": rpc error: code = NotFound desc = could not find container \"21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03\": container with ID starting with 21f0f264f857c037056df77f0962e4780fab1a56fa8c47b9d47e487db3d99b03 not found: ID does not exist" Dec 03 06:46:50 crc kubenswrapper[4810]: I1203 06:46:50.396040 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" path="/var/lib/kubelet/pods/6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225/volumes" Dec 03 06:46:58 crc kubenswrapper[4810]: I1203 06:46:58.378491 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:46:58 crc kubenswrapper[4810]: E1203 06:46:58.379624 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:47:09 crc kubenswrapper[4810]: I1203 06:47:09.379250 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" 
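The startup probe failure recorded above ('timeout: failed to connect service ":50051" within 1s' for the registry-server container) is, at minimum, a connectivity check against port 50051 with a one-second budget. The sketch below reproduces only that much by hand as a plain TCP reachability test; the loopback host is an assumption, and the real probe is likely a gRPC health check rather than a bare TCP connect, which this log does not confirm.

```python
#!/usr/bin/env python3
"""Minimal sketch of the kind of check behind the registry-server startup
probe failure above: is port 50051 reachable within 1 second?

Assumptions (not confirmed by this log): the probe target is pod-local
(127.0.0.1 here), and a bare TCP connect is a fair stand-in for the real
gRPC health check."""
import socket
import sys

HOST = "127.0.0.1"   # assumption: pod-local registry-server
PORT = 50051         # port taken from the probe failure message
TIMEOUT = 1.0        # seconds, matching the "within 1s" budget in the log


def port_is_open(host: str, port: int, timeout: float) -> bool:
    """Return True if a TCP connection to host:port succeeds within timeout."""
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False


if __name__ == "__main__":
    ok = port_is_open(HOST, PORT, TIMEOUT)
    print(f"{HOST}:{PORT} {'reachable' if ok else 'unreachable'} within {TIMEOUT:.0f}s")
    sys.exit(0 if ok else 1)
```

In this log the first startup probe at 06:46:37 fails and the probe only reports "started" at 06:46:46, roughly 15 seconds after the registry-server container started, which is consistent with the catalog server simply needing a few seconds before it begins listening.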
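The repeated 'back-off 5m0s restarting failed container=machine-config-daemon' errors are the kubelet's CrashLoopBackOff at its cap: the RemoveContainer / "Error syncing pod, skipping" pair recurs every 10-15 seconds from 06:46:33 onward, and the container is only actually restarted at 06:49:00 once the five-minute back-off has elapsed. The sketch below illustrates the usual shape of that back-off (start small, double per failed restart, cap out); only the 5m0s cap appears in this log, and the 10-second base is an assumption about kubelet defaults.

```python
"""Sketch of a CrashLoopBackOff-style restart delay schedule.

Assumption: the delay starts at 10s and doubles per failed restart until it
hits the 5m0s cap that the log messages above report; these constants are
illustrative, not read from this node's configuration."""
BASE_DELAY_S = 10    # assumed initial back-off
MAX_DELAY_S = 300    # the "back-off 5m0s" cap visible in the log


def backoff_schedule(restarts: int) -> list[int]:
    """Delay in seconds applied before each of the first `restarts` restarts."""
    delays, delay = [], BASE_DELAY_S
    for _ in range(restarts):
        delays.append(delay)
        delay = min(delay * 2, MAX_DELAY_S)
    return delays


if __name__ == "__main__":
    # -> [10, 20, 40, 80, 160, 300, 300, 300]: once the cap is reached, every
    # further restart waits the full five minutes, which is why this log keeps
    # printing "Error syncing pod, skipping" until the 06:49:00 restart.
    print(backoff_schedule(8))
```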
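Each 'Observed pod startup duration' line from pod_startup_latency_tracker packs several timestamps into one klog entry (podStartSLOduration, podStartE2EDuration, podCreationTimestamp, firstStartedPulling, lastFinishedPulling, watchObservedRunningTime). As a rough aid for pulling those numbers out of this log, the sketch below parses the key=value fields from a line trimmed out of the redhat-operators-gpfgq entry and recomputes the end-to-end figure as watchObservedRunningTime minus podCreationTimestamp, which matches the reported 5.783034418s here; the field names are copied from the log, while the parsing regexes are assumptions about the klog layout.

```python
"""Sketch: parse the key=value fields of a pod_startup_latency_tracker line
and recompute the end-to-end startup duration.

The LINE constant is trimmed from the redhat-operators-gpfgq entry above;
the regexes are assumptions about the klog key=value / key="value" layout."""
import re
from datetime import datetime

LINE = ('pod="openshift-marketplace/redhat-operators-gpfgq" '
        'podStartSLOduration=3.069647737 podStartE2EDuration="5.783034418s" '
        'podCreationTimestamp="2025-12-03 06:46:26 +0000 UTC" '
        'watchObservedRunningTime="2025-12-03 06:46:31.783034418 +0000 UTC m=+3915.718495259"')

FIELD_RE = re.compile(r'(\w+)=(?:"([^"]*)"|(\S+))')
TS_RE = re.compile(r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(?:\.\d+)?')


def parse_fields(line: str) -> dict[str, str]:
    """Return key -> value for every key=value / key="value" pair on the line."""
    out = {}
    for m in FIELD_RE.finditer(line):
        out[m.group(1)] = m.group(2) if m.group(2) is not None else m.group(3)
    return out


def parse_ts(value: str) -> datetime:
    """Pick out the wall-clock timestamp, ignoring the trailing monotonic 'm=+...' part."""
    ts = TS_RE.search(value).group(0)
    if "." in ts:
        head, frac = ts.split(".")
        return datetime.strptime(f"{head}.{frac[:6]}", "%Y-%m-%d %H:%M:%S.%f")
    return datetime.strptime(ts, "%Y-%m-%d %H:%M:%S")


if __name__ == "__main__":
    f = parse_fields(LINE)
    e2e = parse_ts(f["watchObservedRunningTime"]) - parse_ts(f["podCreationTimestamp"])
    print(f'{f["pod"]}: recomputed e2e {e2e.total_seconds():.6f}s, reported {f["podStartE2EDuration"]}')
```

The same parsing applies unchanged to the later certified-operators-m2q8q and test-operator-logs-pod entries further down in this log.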
Dec 03 06:47:09 crc kubenswrapper[4810]: E1203 06:47:09.380121 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:47:20 crc kubenswrapper[4810]: I1203 06:47:20.377838 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:47:20 crc kubenswrapper[4810]: E1203 06:47:20.378460 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:47:34 crc kubenswrapper[4810]: I1203 06:47:34.386430 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:47:34 crc kubenswrapper[4810]: E1203 06:47:34.387670 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:47:46 crc kubenswrapper[4810]: I1203 06:47:46.388664 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:47:46 crc kubenswrapper[4810]: E1203 06:47:46.391493 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:47:57 crc kubenswrapper[4810]: I1203 06:47:57.377439 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:47:57 crc kubenswrapper[4810]: E1203 06:47:57.379207 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:48:08 crc kubenswrapper[4810]: I1203 06:48:08.377717 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:48:08 crc kubenswrapper[4810]: E1203 06:48:08.378698 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:48:19 crc kubenswrapper[4810]: I1203 06:48:19.377848 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:48:19 crc kubenswrapper[4810]: E1203 06:48:19.378948 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:48:32 crc kubenswrapper[4810]: I1203 06:48:32.378033 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:48:32 crc kubenswrapper[4810]: E1203 06:48:32.378709 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:48:47 crc kubenswrapper[4810]: I1203 06:48:47.377824 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:48:47 crc kubenswrapper[4810]: E1203 06:48:47.379235 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:48:58 crc kubenswrapper[4810]: I1203 06:48:58.377833 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:49:00 crc kubenswrapper[4810]: I1203 06:49:00.345048 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"8220f58f4f4a4254b484d5ed1eae034f8f84b205f2bd8961d3fdf0b01709c778"} Dec 03 06:49:33 crc kubenswrapper[4810]: I1203 06:49:33.938051 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m2q8q"] Dec 03 06:49:33 crc kubenswrapper[4810]: E1203 06:49:33.940494 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="registry-server" Dec 03 06:49:33 crc kubenswrapper[4810]: I1203 06:49:33.940611 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="registry-server" Dec 03 06:49:33 crc kubenswrapper[4810]: E1203 06:49:33.940659 4810 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="extract-utilities" Dec 03 06:49:33 crc kubenswrapper[4810]: I1203 06:49:33.940666 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="extract-utilities" Dec 03 06:49:33 crc kubenswrapper[4810]: E1203 06:49:33.940690 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="extract-content" Dec 03 06:49:33 crc kubenswrapper[4810]: I1203 06:49:33.940697 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="extract-content" Dec 03 06:49:33 crc kubenswrapper[4810]: I1203 06:49:33.942213 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ff7fb76-2a6f-4ef4-8ec0-ad8e9c275225" containerName="registry-server" Dec 03 06:49:33 crc kubenswrapper[4810]: I1203 06:49:33.957843 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:33.997913 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m2q8q"] Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.102760 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-catalog-content\") pod \"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.102987 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw5tp\" (UniqueName: \"kubernetes.io/projected/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-kube-api-access-tw5tp\") pod \"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.103048 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-utilities\") pod \"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.204503 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-catalog-content\") pod \"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.204655 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw5tp\" (UniqueName: \"kubernetes.io/projected/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-kube-api-access-tw5tp\") pod \"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.204689 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-utilities\") pod 
\"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.205205 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-catalog-content\") pod \"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.205238 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-utilities\") pod \"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.224019 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw5tp\" (UniqueName: \"kubernetes.io/projected/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-kube-api-access-tw5tp\") pod \"certified-operators-m2q8q\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.300963 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:34 crc kubenswrapper[4810]: I1203 06:49:34.838609 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m2q8q"] Dec 03 06:49:34 crc kubenswrapper[4810]: W1203 06:49:34.859903 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ddf77a2_1f10_4b4f_8eb3_0d8cc8417c4c.slice/crio-0b3eb7bb793b55b9fc5004e74fc8a98be35373636955ae7231352ad76199a37a WatchSource:0}: Error finding container 0b3eb7bb793b55b9fc5004e74fc8a98be35373636955ae7231352ad76199a37a: Status 404 returned error can't find the container with id 0b3eb7bb793b55b9fc5004e74fc8a98be35373636955ae7231352ad76199a37a Dec 03 06:49:35 crc kubenswrapper[4810]: I1203 06:49:35.699988 4810 generic.go:334] "Generic (PLEG): container finished" podID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerID="25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df" exitCode=0 Dec 03 06:49:35 crc kubenswrapper[4810]: I1203 06:49:35.700037 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2q8q" event={"ID":"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c","Type":"ContainerDied","Data":"25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df"} Dec 03 06:49:35 crc kubenswrapper[4810]: I1203 06:49:35.700345 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2q8q" event={"ID":"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c","Type":"ContainerStarted","Data":"0b3eb7bb793b55b9fc5004e74fc8a98be35373636955ae7231352ad76199a37a"} Dec 03 06:49:36 crc kubenswrapper[4810]: I1203 06:49:36.708668 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2q8q" event={"ID":"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c","Type":"ContainerStarted","Data":"c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3"} Dec 03 06:49:37 crc kubenswrapper[4810]: I1203 06:49:37.721751 4810 generic.go:334] "Generic (PLEG): container finished" 
podID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerID="c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3" exitCode=0 Dec 03 06:49:37 crc kubenswrapper[4810]: I1203 06:49:37.722094 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2q8q" event={"ID":"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c","Type":"ContainerDied","Data":"c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3"} Dec 03 06:49:38 crc kubenswrapper[4810]: I1203 06:49:38.741662 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2q8q" event={"ID":"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c","Type":"ContainerStarted","Data":"1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a"} Dec 03 06:49:38 crc kubenswrapper[4810]: I1203 06:49:38.783543 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m2q8q" podStartSLOduration=3.273743449 podStartE2EDuration="5.783512082s" podCreationTimestamp="2025-12-03 06:49:33 +0000 UTC" firstStartedPulling="2025-12-03 06:49:35.70206494 +0000 UTC m=+4099.637525781" lastFinishedPulling="2025-12-03 06:49:38.211833543 +0000 UTC m=+4102.147294414" observedRunningTime="2025-12-03 06:49:38.771233918 +0000 UTC m=+4102.706694829" watchObservedRunningTime="2025-12-03 06:49:38.783512082 +0000 UTC m=+4102.718972973" Dec 03 06:49:44 crc kubenswrapper[4810]: I1203 06:49:44.301492 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:44 crc kubenswrapper[4810]: I1203 06:49:44.301960 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:44 crc kubenswrapper[4810]: I1203 06:49:44.352387 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:44 crc kubenswrapper[4810]: I1203 06:49:44.883322 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:44 crc kubenswrapper[4810]: I1203 06:49:44.958599 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m2q8q"] Dec 03 06:49:46 crc kubenswrapper[4810]: I1203 06:49:46.831902 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m2q8q" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerName="registry-server" containerID="cri-o://1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a" gracePeriod=2 Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.390809 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.471682 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw5tp\" (UniqueName: \"kubernetes.io/projected/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-kube-api-access-tw5tp\") pod \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.471921 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-catalog-content\") pod \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.472148 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-utilities\") pod \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\" (UID: \"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c\") " Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.473265 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-utilities" (OuterVolumeSpecName: "utilities") pod "2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" (UID: "2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.482939 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-kube-api-access-tw5tp" (OuterVolumeSpecName: "kube-api-access-tw5tp") pod "2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" (UID: "2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c"). InnerVolumeSpecName "kube-api-access-tw5tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.520813 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" (UID: "2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.575077 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.575131 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw5tp\" (UniqueName: \"kubernetes.io/projected/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-kube-api-access-tw5tp\") on node \"crc\" DevicePath \"\"" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.575146 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.844226 4810 generic.go:334] "Generic (PLEG): container finished" podID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerID="1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a" exitCode=0 Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.844280 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2q8q" event={"ID":"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c","Type":"ContainerDied","Data":"1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a"} Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.844626 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2q8q" event={"ID":"2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c","Type":"ContainerDied","Data":"0b3eb7bb793b55b9fc5004e74fc8a98be35373636955ae7231352ad76199a37a"} Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.844673 4810 scope.go:117] "RemoveContainer" containerID="1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.844331 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m2q8q" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.869280 4810 scope.go:117] "RemoveContainer" containerID="c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.895911 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m2q8q"] Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.901527 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m2q8q"] Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.915176 4810 scope.go:117] "RemoveContainer" containerID="25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.963069 4810 scope.go:117] "RemoveContainer" containerID="1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a" Dec 03 06:49:47 crc kubenswrapper[4810]: E1203 06:49:47.963591 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a\": container with ID starting with 1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a not found: ID does not exist" containerID="1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.963658 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a"} err="failed to get container status \"1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a\": rpc error: code = NotFound desc = could not find container \"1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a\": container with ID starting with 1c2675be1776c50c983289b63f6174220539f675dc9840c4e1b3b13baca6b26a not found: ID does not exist" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.963701 4810 scope.go:117] "RemoveContainer" containerID="c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3" Dec 03 06:49:47 crc kubenswrapper[4810]: E1203 06:49:47.964157 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3\": container with ID starting with c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3 not found: ID does not exist" containerID="c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.964180 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3"} err="failed to get container status \"c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3\": rpc error: code = NotFound desc = could not find container \"c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3\": container with ID starting with c606922bb07b1ded79badc157c64b1dbbb888708c68f6a08f46de17891240eb3 not found: ID does not exist" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.964210 4810 scope.go:117] "RemoveContainer" containerID="25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df" Dec 03 06:49:47 crc kubenswrapper[4810]: E1203 06:49:47.964625 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df\": container with ID starting with 25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df not found: ID does not exist" containerID="25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df" Dec 03 06:49:47 crc kubenswrapper[4810]: I1203 06:49:47.964672 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df"} err="failed to get container status \"25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df\": rpc error: code = NotFound desc = could not find container \"25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df\": container with ID starting with 25178e2f3d0f4b15f3e085b13dd5917c739fafcc7bbcc4c2d9c693e3848a04df not found: ID does not exist" Dec 03 06:49:48 crc kubenswrapper[4810]: I1203 06:49:48.398868 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" path="/var/lib/kubelet/pods/2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c/volumes" Dec 03 06:50:14 crc kubenswrapper[4810]: I1203 06:50:14.131419 4810 generic.go:334] "Generic (PLEG): container finished" podID="369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" containerID="7a56db0da43cbeb3fd9eeedf3ef1e2fc88d886df42055893b618c74ee7114221" exitCode=0 Dec 03 06:50:14 crc kubenswrapper[4810]: I1203 06:50:14.131505 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f","Type":"ContainerDied","Data":"7a56db0da43cbeb3fd9eeedf3ef1e2fc88d886df42055893b618c74ee7114221"} Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.606553 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.741787 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.741860 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.741923 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-workdir\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.741968 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ca-certs\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.741995 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-config-data\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.742092 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wq5f8\" (UniqueName: \"kubernetes.io/projected/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-kube-api-access-wq5f8\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.742141 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config-secret\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.742175 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-temporary\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.742204 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ssh-key\") pod \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\" (UID: \"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f\") " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.743166 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.743237 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-config-data" (OuterVolumeSpecName: "config-data") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.750078 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.751552 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-kube-api-access-wq5f8" (OuterVolumeSpecName: "kube-api-access-wq5f8") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "kube-api-access-wq5f8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.759371 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.776408 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.779061 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.804380 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.817470 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" (UID: "369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.844762 4810 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.844845 4810 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.844862 4810 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.844908 4810 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.844923 4810 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.844935 4810 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.859815 4810 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.859853 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.859867 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wq5f8\" (UniqueName: \"kubernetes.io/projected/369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f-kube-api-access-wq5f8\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.871623 4810 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 03 06:50:15 crc kubenswrapper[4810]: I1203 06:50:15.961779 4810 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 03 06:50:16 crc kubenswrapper[4810]: I1203 06:50:16.155101 4810 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f","Type":"ContainerDied","Data":"43b85d63fb03e308c2f35aadec0d3c32c74b86050345268b89dac1a792316ee0"} Dec 03 06:50:16 crc kubenswrapper[4810]: I1203 06:50:16.155150 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43b85d63fb03e308c2f35aadec0d3c32c74b86050345268b89dac1a792316ee0" Dec 03 06:50:16 crc kubenswrapper[4810]: I1203 06:50:16.155161 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.489979 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 06:50:23 crc kubenswrapper[4810]: E1203 06:50:23.491355 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" containerName="tempest-tests-tempest-tests-runner" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.491382 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" containerName="tempest-tests-tempest-tests-runner" Dec 03 06:50:23 crc kubenswrapper[4810]: E1203 06:50:23.491403 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerName="extract-utilities" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.491416 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerName="extract-utilities" Dec 03 06:50:23 crc kubenswrapper[4810]: E1203 06:50:23.491450 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerName="registry-server" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.491462 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerName="registry-server" Dec 03 06:50:23 crc kubenswrapper[4810]: E1203 06:50:23.491487 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerName="extract-content" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.491499 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerName="extract-content" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.491925 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f" containerName="tempest-tests-tempest-tests-runner" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.491960 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ddf77a2-1f10-4b4f-8eb3-0d8cc8417c4c" containerName="registry-server" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.493291 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.497444 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bx766" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.506824 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.612165 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg898\" (UniqueName: \"kubernetes.io/projected/cae965d3-f73d-492c-8f57-f5a9e57c1d53-kube-api-access-dg898\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cae965d3-f73d-492c-8f57-f5a9e57c1d53\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.612575 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cae965d3-f73d-492c-8f57-f5a9e57c1d53\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.715117 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cae965d3-f73d-492c-8f57-f5a9e57c1d53\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.715387 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg898\" (UniqueName: \"kubernetes.io/projected/cae965d3-f73d-492c-8f57-f5a9e57c1d53-kube-api-access-dg898\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cae965d3-f73d-492c-8f57-f5a9e57c1d53\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.715864 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cae965d3-f73d-492c-8f57-f5a9e57c1d53\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.760035 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg898\" (UniqueName: \"kubernetes.io/projected/cae965d3-f73d-492c-8f57-f5a9e57c1d53-kube-api-access-dg898\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cae965d3-f73d-492c-8f57-f5a9e57c1d53\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:23 crc kubenswrapper[4810]: I1203 06:50:23.772828 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cae965d3-f73d-492c-8f57-f5a9e57c1d53\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:23 crc 
kubenswrapper[4810]: I1203 06:50:23.826866 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 06:50:24 crc kubenswrapper[4810]: I1203 06:50:24.420931 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 06:50:25 crc kubenswrapper[4810]: I1203 06:50:25.246033 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"cae965d3-f73d-492c-8f57-f5a9e57c1d53","Type":"ContainerStarted","Data":"bffb8c4204e789647000319a0428fc7aaf81e0a1577c2b18d0a088304e27c6fa"} Dec 03 06:50:26 crc kubenswrapper[4810]: I1203 06:50:26.257424 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"cae965d3-f73d-492c-8f57-f5a9e57c1d53","Type":"ContainerStarted","Data":"4301393045c5f928379c45f048fbc5cfad9c185e0be424ba132b9bdd6dea5620"} Dec 03 06:50:26 crc kubenswrapper[4810]: I1203 06:50:26.282790 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.198472804 podStartE2EDuration="3.282722138s" podCreationTimestamp="2025-12-03 06:50:23 +0000 UTC" firstStartedPulling="2025-12-03 06:50:24.891588709 +0000 UTC m=+4148.827049590" lastFinishedPulling="2025-12-03 06:50:25.975838083 +0000 UTC m=+4149.911298924" observedRunningTime="2025-12-03 06:50:26.271927384 +0000 UTC m=+4150.207388225" watchObservedRunningTime="2025-12-03 06:50:26.282722138 +0000 UTC m=+4150.218183019" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.097207 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xt4vr/must-gather-g4blm"] Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.099581 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.101234 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xt4vr"/"kube-root-ca.crt" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.101433 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xt4vr"/"default-dockercfg-xnhf5" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.106224 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xt4vr/must-gather-g4blm"] Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.109618 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xt4vr"/"openshift-service-ca.crt" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.193654 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/101e3b8d-2028-41c8-af69-3f7fbfb31209-must-gather-output\") pod \"must-gather-g4blm\" (UID: \"101e3b8d-2028-41c8-af69-3f7fbfb31209\") " pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.193695 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grvr2\" (UniqueName: \"kubernetes.io/projected/101e3b8d-2028-41c8-af69-3f7fbfb31209-kube-api-access-grvr2\") pod \"must-gather-g4blm\" (UID: \"101e3b8d-2028-41c8-af69-3f7fbfb31209\") " pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.295584 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/101e3b8d-2028-41c8-af69-3f7fbfb31209-must-gather-output\") pod \"must-gather-g4blm\" (UID: \"101e3b8d-2028-41c8-af69-3f7fbfb31209\") " pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.295631 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grvr2\" (UniqueName: \"kubernetes.io/projected/101e3b8d-2028-41c8-af69-3f7fbfb31209-kube-api-access-grvr2\") pod \"must-gather-g4blm\" (UID: \"101e3b8d-2028-41c8-af69-3f7fbfb31209\") " pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.296060 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/101e3b8d-2028-41c8-af69-3f7fbfb31209-must-gather-output\") pod \"must-gather-g4blm\" (UID: \"101e3b8d-2028-41c8-af69-3f7fbfb31209\") " pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.316075 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grvr2\" (UniqueName: \"kubernetes.io/projected/101e3b8d-2028-41c8-af69-3f7fbfb31209-kube-api-access-grvr2\") pod \"must-gather-g4blm\" (UID: \"101e3b8d-2028-41c8-af69-3f7fbfb31209\") " pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.415732 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.734071 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xt4vr/must-gather-g4blm"] Dec 03 06:50:49 crc kubenswrapper[4810]: I1203 06:50:49.745391 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:50:50 crc kubenswrapper[4810]: I1203 06:50:50.528924 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/must-gather-g4blm" event={"ID":"101e3b8d-2028-41c8-af69-3f7fbfb31209","Type":"ContainerStarted","Data":"e9057682add8e75c9377ef5ef7a0d736788e43ea990b3ec884c7289909f6cf8a"} Dec 03 06:50:56 crc kubenswrapper[4810]: I1203 06:50:56.600342 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/must-gather-g4blm" event={"ID":"101e3b8d-2028-41c8-af69-3f7fbfb31209","Type":"ContainerStarted","Data":"feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860"} Dec 03 06:50:56 crc kubenswrapper[4810]: I1203 06:50:56.600871 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/must-gather-g4blm" event={"ID":"101e3b8d-2028-41c8-af69-3f7fbfb31209","Type":"ContainerStarted","Data":"482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487"} Dec 03 06:50:56 crc kubenswrapper[4810]: I1203 06:50:56.620391 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xt4vr/must-gather-g4blm" podStartSLOduration=1.537622775 podStartE2EDuration="7.620372894s" podCreationTimestamp="2025-12-03 06:50:49 +0000 UTC" firstStartedPulling="2025-12-03 06:50:49.74521433 +0000 UTC m=+4173.680675171" lastFinishedPulling="2025-12-03 06:50:55.827964449 +0000 UTC m=+4179.763425290" observedRunningTime="2025-12-03 06:50:56.61720095 +0000 UTC m=+4180.552661791" watchObservedRunningTime="2025-12-03 06:50:56.620372894 +0000 UTC m=+4180.555833755" Dec 03 06:50:59 crc kubenswrapper[4810]: I1203 06:50:59.775181 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xt4vr/crc-debug-qnpjn"] Dec 03 06:50:59 crc kubenswrapper[4810]: I1203 06:50:59.776695 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:50:59 crc kubenswrapper[4810]: I1203 06:50:59.914383 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdjpb\" (UniqueName: \"kubernetes.io/projected/e1deed43-5ad7-4538-8840-2a6e345ecce8-kube-api-access-tdjpb\") pod \"crc-debug-qnpjn\" (UID: \"e1deed43-5ad7-4538-8840-2a6e345ecce8\") " pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:50:59 crc kubenswrapper[4810]: I1203 06:50:59.914965 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1deed43-5ad7-4538-8840-2a6e345ecce8-host\") pod \"crc-debug-qnpjn\" (UID: \"e1deed43-5ad7-4538-8840-2a6e345ecce8\") " pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:51:00 crc kubenswrapper[4810]: I1203 06:51:00.017272 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1deed43-5ad7-4538-8840-2a6e345ecce8-host\") pod \"crc-debug-qnpjn\" (UID: \"e1deed43-5ad7-4538-8840-2a6e345ecce8\") " pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:51:00 crc kubenswrapper[4810]: I1203 06:51:00.017457 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1deed43-5ad7-4538-8840-2a6e345ecce8-host\") pod \"crc-debug-qnpjn\" (UID: \"e1deed43-5ad7-4538-8840-2a6e345ecce8\") " pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:51:00 crc kubenswrapper[4810]: I1203 06:51:00.017566 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdjpb\" (UniqueName: \"kubernetes.io/projected/e1deed43-5ad7-4538-8840-2a6e345ecce8-kube-api-access-tdjpb\") pod \"crc-debug-qnpjn\" (UID: \"e1deed43-5ad7-4538-8840-2a6e345ecce8\") " pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:51:00 crc kubenswrapper[4810]: I1203 06:51:00.283202 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdjpb\" (UniqueName: \"kubernetes.io/projected/e1deed43-5ad7-4538-8840-2a6e345ecce8-kube-api-access-tdjpb\") pod \"crc-debug-qnpjn\" (UID: \"e1deed43-5ad7-4538-8840-2a6e345ecce8\") " pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:51:00 crc kubenswrapper[4810]: I1203 06:51:00.394677 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:51:00 crc kubenswrapper[4810]: W1203 06:51:00.437422 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1deed43_5ad7_4538_8840_2a6e345ecce8.slice/crio-c447cae7e983ecccda831df8583f0756ae3b020ee37959d4affda31a89d12fb5 WatchSource:0}: Error finding container c447cae7e983ecccda831df8583f0756ae3b020ee37959d4affda31a89d12fb5: Status 404 returned error can't find the container with id c447cae7e983ecccda831df8583f0756ae3b020ee37959d4affda31a89d12fb5 Dec 03 06:51:00 crc kubenswrapper[4810]: I1203 06:51:00.646493 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" event={"ID":"e1deed43-5ad7-4538-8840-2a6e345ecce8","Type":"ContainerStarted","Data":"c447cae7e983ecccda831df8583f0756ae3b020ee37959d4affda31a89d12fb5"} Dec 03 06:51:12 crc kubenswrapper[4810]: I1203 06:51:12.756175 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" event={"ID":"e1deed43-5ad7-4538-8840-2a6e345ecce8","Type":"ContainerStarted","Data":"19ab936503a509b3653cf0dadae58137e89827ca778bae8f03f4d1b7306cee60"} Dec 03 06:51:12 crc kubenswrapper[4810]: I1203 06:51:12.772359 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" podStartSLOduration=2.173433773 podStartE2EDuration="13.772337593s" podCreationTimestamp="2025-12-03 06:50:59 +0000 UTC" firstStartedPulling="2025-12-03 06:51:00.440782711 +0000 UTC m=+4184.376243562" lastFinishedPulling="2025-12-03 06:51:12.039686541 +0000 UTC m=+4195.975147382" observedRunningTime="2025-12-03 06:51:12.769021386 +0000 UTC m=+4196.704482227" watchObservedRunningTime="2025-12-03 06:51:12.772337593 +0000 UTC m=+4196.707798444" Dec 03 06:51:25 crc kubenswrapper[4810]: I1203 06:51:25.678121 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:51:25 crc kubenswrapper[4810]: I1203 06:51:25.679053 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:51:27 crc kubenswrapper[4810]: I1203 06:51:27.907761 4810 generic.go:334] "Generic (PLEG): container finished" podID="e1deed43-5ad7-4538-8840-2a6e345ecce8" containerID="19ab936503a509b3653cf0dadae58137e89827ca778bae8f03f4d1b7306cee60" exitCode=0 Dec 03 06:51:27 crc kubenswrapper[4810]: I1203 06:51:27.907832 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" event={"ID":"e1deed43-5ad7-4538-8840-2a6e345ecce8","Type":"ContainerDied","Data":"19ab936503a509b3653cf0dadae58137e89827ca778bae8f03f4d1b7306cee60"} Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.199913 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.235804 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xt4vr/crc-debug-qnpjn"] Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.248999 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xt4vr/crc-debug-qnpjn"] Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.256440 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1deed43-5ad7-4538-8840-2a6e345ecce8-host\") pod \"e1deed43-5ad7-4538-8840-2a6e345ecce8\" (UID: \"e1deed43-5ad7-4538-8840-2a6e345ecce8\") " Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.256514 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1deed43-5ad7-4538-8840-2a6e345ecce8-host" (OuterVolumeSpecName: "host") pod "e1deed43-5ad7-4538-8840-2a6e345ecce8" (UID: "e1deed43-5ad7-4538-8840-2a6e345ecce8"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.256554 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdjpb\" (UniqueName: \"kubernetes.io/projected/e1deed43-5ad7-4538-8840-2a6e345ecce8-kube-api-access-tdjpb\") pod \"e1deed43-5ad7-4538-8840-2a6e345ecce8\" (UID: \"e1deed43-5ad7-4538-8840-2a6e345ecce8\") " Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.257245 4810 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1deed43-5ad7-4538-8840-2a6e345ecce8-host\") on node \"crc\" DevicePath \"\"" Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.267878 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1deed43-5ad7-4538-8840-2a6e345ecce8-kube-api-access-tdjpb" (OuterVolumeSpecName: "kube-api-access-tdjpb") pod "e1deed43-5ad7-4538-8840-2a6e345ecce8" (UID: "e1deed43-5ad7-4538-8840-2a6e345ecce8"). InnerVolumeSpecName "kube-api-access-tdjpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.359648 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdjpb\" (UniqueName: \"kubernetes.io/projected/e1deed43-5ad7-4538-8840-2a6e345ecce8-kube-api-access-tdjpb\") on node \"crc\" DevicePath \"\"" Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.945984 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c447cae7e983ecccda831df8583f0756ae3b020ee37959d4affda31a89d12fb5" Dec 03 06:51:29 crc kubenswrapper[4810]: I1203 06:51:29.946391 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xt4vr/crc-debug-qnpjn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.385724 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1deed43-5ad7-4538-8840-2a6e345ecce8" path="/var/lib/kubelet/pods/e1deed43-5ad7-4538-8840-2a6e345ecce8/volumes" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.441237 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xt4vr/crc-debug-cwhxn"] Dec 03 06:51:30 crc kubenswrapper[4810]: E1203 06:51:30.441579 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1deed43-5ad7-4538-8840-2a6e345ecce8" containerName="container-00" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.441595 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1deed43-5ad7-4538-8840-2a6e345ecce8" containerName="container-00" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.441811 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1deed43-5ad7-4538-8840-2a6e345ecce8" containerName="container-00" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.442407 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.487763 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn64c\" (UniqueName: \"kubernetes.io/projected/8ab3b408-6d76-4754-847a-35d5e82d8972-kube-api-access-dn64c\") pod \"crc-debug-cwhxn\" (UID: \"8ab3b408-6d76-4754-847a-35d5e82d8972\") " pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.487941 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ab3b408-6d76-4754-847a-35d5e82d8972-host\") pod \"crc-debug-cwhxn\" (UID: \"8ab3b408-6d76-4754-847a-35d5e82d8972\") " pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.589401 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn64c\" (UniqueName: \"kubernetes.io/projected/8ab3b408-6d76-4754-847a-35d5e82d8972-kube-api-access-dn64c\") pod \"crc-debug-cwhxn\" (UID: \"8ab3b408-6d76-4754-847a-35d5e82d8972\") " pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.589520 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ab3b408-6d76-4754-847a-35d5e82d8972-host\") pod \"crc-debug-cwhxn\" (UID: \"8ab3b408-6d76-4754-847a-35d5e82d8972\") " pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.589605 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ab3b408-6d76-4754-847a-35d5e82d8972-host\") pod \"crc-debug-cwhxn\" (UID: \"8ab3b408-6d76-4754-847a-35d5e82d8972\") " pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.615397 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn64c\" (UniqueName: \"kubernetes.io/projected/8ab3b408-6d76-4754-847a-35d5e82d8972-kube-api-access-dn64c\") pod \"crc-debug-cwhxn\" (UID: \"8ab3b408-6d76-4754-847a-35d5e82d8972\") " 
pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.757238 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:30 crc kubenswrapper[4810]: I1203 06:51:30.955266 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" event={"ID":"8ab3b408-6d76-4754-847a-35d5e82d8972","Type":"ContainerStarted","Data":"1580e53d114e4069b6b4c7b8fb8d8deb08bcf04fbc2a1380e1312fd8d9084acf"} Dec 03 06:51:31 crc kubenswrapper[4810]: I1203 06:51:31.966704 4810 generic.go:334] "Generic (PLEG): container finished" podID="8ab3b408-6d76-4754-847a-35d5e82d8972" containerID="c3bdcdb35bf7272de449605ff3a32422a201384dba0170d949ebef3dbec4453c" exitCode=1 Dec 03 06:51:31 crc kubenswrapper[4810]: I1203 06:51:31.966757 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" event={"ID":"8ab3b408-6d76-4754-847a-35d5e82d8972","Type":"ContainerDied","Data":"c3bdcdb35bf7272de449605ff3a32422a201384dba0170d949ebef3dbec4453c"} Dec 03 06:51:32 crc kubenswrapper[4810]: I1203 06:51:32.004763 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xt4vr/crc-debug-cwhxn"] Dec 03 06:51:32 crc kubenswrapper[4810]: I1203 06:51:32.016644 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xt4vr/crc-debug-cwhxn"] Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.088989 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.146314 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ab3b408-6d76-4754-847a-35d5e82d8972-host\") pod \"8ab3b408-6d76-4754-847a-35d5e82d8972\" (UID: \"8ab3b408-6d76-4754-847a-35d5e82d8972\") " Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.146385 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn64c\" (UniqueName: \"kubernetes.io/projected/8ab3b408-6d76-4754-847a-35d5e82d8972-kube-api-access-dn64c\") pod \"8ab3b408-6d76-4754-847a-35d5e82d8972\" (UID: \"8ab3b408-6d76-4754-847a-35d5e82d8972\") " Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.146452 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ab3b408-6d76-4754-847a-35d5e82d8972-host" (OuterVolumeSpecName: "host") pod "8ab3b408-6d76-4754-847a-35d5e82d8972" (UID: "8ab3b408-6d76-4754-847a-35d5e82d8972"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.146788 4810 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ab3b408-6d76-4754-847a-35d5e82d8972-host\") on node \"crc\" DevicePath \"\"" Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.152911 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ab3b408-6d76-4754-847a-35d5e82d8972-kube-api-access-dn64c" (OuterVolumeSpecName: "kube-api-access-dn64c") pod "8ab3b408-6d76-4754-847a-35d5e82d8972" (UID: "8ab3b408-6d76-4754-847a-35d5e82d8972"). InnerVolumeSpecName "kube-api-access-dn64c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.248013 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dn64c\" (UniqueName: \"kubernetes.io/projected/8ab3b408-6d76-4754-847a-35d5e82d8972-kube-api-access-dn64c\") on node \"crc\" DevicePath \"\"" Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.988862 4810 scope.go:117] "RemoveContainer" containerID="c3bdcdb35bf7272de449605ff3a32422a201384dba0170d949ebef3dbec4453c" Dec 03 06:51:33 crc kubenswrapper[4810]: I1203 06:51:33.988971 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xt4vr/crc-debug-cwhxn" Dec 03 06:51:34 crc kubenswrapper[4810]: I1203 06:51:34.389217 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ab3b408-6d76-4754-847a-35d5e82d8972" path="/var/lib/kubelet/pods/8ab3b408-6d76-4754-847a-35d5e82d8972/volumes" Dec 03 06:51:55 crc kubenswrapper[4810]: I1203 06:51:55.677427 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:51:55 crc kubenswrapper[4810]: I1203 06:51:55.677933 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:52:05 crc kubenswrapper[4810]: I1203 06:52:05.201308 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c9578688-qf8tw_3554de12-10d1-48a4-a17e-d5ce9955fa9c/barbican-api/0.log" Dec 03 06:52:05 crc kubenswrapper[4810]: I1203 06:52:05.278507 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c9578688-qf8tw_3554de12-10d1-48a4-a17e-d5ce9955fa9c/barbican-api-log/0.log" Dec 03 06:52:05 crc kubenswrapper[4810]: I1203 06:52:05.405944 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-75bb559794-4gn48_3260c501-1348-49f4-8182-437086a5649e/barbican-keystone-listener/0.log" Dec 03 06:52:05 crc kubenswrapper[4810]: I1203 06:52:05.460117 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-75bb559794-4gn48_3260c501-1348-49f4-8182-437086a5649e/barbican-keystone-listener-log/0.log" Dec 03 06:52:05 crc kubenswrapper[4810]: I1203 06:52:05.567370 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-856ff6b4bf-thl85_9d889657-930a-4878-8727-91b0ab50723c/barbican-worker/0.log" Dec 03 06:52:05 crc kubenswrapper[4810]: I1203 06:52:05.642857 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-856ff6b4bf-thl85_9d889657-930a-4878-8727-91b0ab50723c/barbican-worker-log/0.log" Dec 03 06:52:05 crc kubenswrapper[4810]: I1203 06:52:05.747918 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-f489j_4dc1bd47-9cbd-4849-b466-bf72ec92cf14/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.055230 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_8e439d29-4442-41b5-94ba-a8fb7f77d5f0/ceilometer-central-agent/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.087104 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_8e439d29-4442-41b5-94ba-a8fb7f77d5f0/ceilometer-notification-agent/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.109686 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_8e439d29-4442-41b5-94ba-a8fb7f77d5f0/proxy-httpd/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.151186 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_8e439d29-4442-41b5-94ba-a8fb7f77d5f0/sg-core/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.307979 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca/cinder-api-log/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.462639 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca/cinder-api/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.568336 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_d2130a28-40ad-4938-a265-8114fbcf38a1/probe/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.594628 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_d2130a28-40ad-4938-a265-8114fbcf38a1/cinder-scheduler/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.653127 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k_4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.780096 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-x76jl_b303053e-72d8-44d5-8766-d83b7fcba87a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:06 crc kubenswrapper[4810]: I1203 06:52:06.834170 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-797f4ccc47-h492p_c2e2cdc0-2bb3-450f-b42d-8bfeee479f46/init/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.058562 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf_15aa89be-fc4f-4965-99f0-3eb7bce02b10/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.082699 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-797f4ccc47-h492p_c2e2cdc0-2bb3-450f-b42d-8bfeee479f46/dnsmasq-dns/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.090282 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-797f4ccc47-h492p_c2e2cdc0-2bb3-450f-b42d-8bfeee479f46/init/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.274493 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2f6dafcd-7f4b-47a9-b5ae-be22f6c84491/glance-httpd/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.293585 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2f6dafcd-7f4b-47a9-b5ae-be22f6c84491/glance-log/0.log" Dec 03 
06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.459534 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6b4868fe-4bdb-492d-bbb1-94d2793b41eb/glance-log/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.483612 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6b4868fe-4bdb-492d-bbb1-94d2793b41eb/glance-httpd/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.576032 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp_ee03c5ac-6b76-4852-b07f-b73140f037dd/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.718512 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-6kpml_04e18cd1-868a-4d9c-882a-c1af0ef1f4dc/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:07 crc kubenswrapper[4810]: I1203 06:52:07.941176 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29412361-hn96l_3a8164cf-937e-4f52-a03e-00708ad12ebb/keystone-cron/0.log" Dec 03 06:52:08 crc kubenswrapper[4810]: I1203 06:52:08.122948 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-69cffb6c97-gskt7_16e8ad53-6c83-4176-94d2-e37a0ff234e2/keystone-api/0.log" Dec 03 06:52:08 crc kubenswrapper[4810]: I1203 06:52:08.198946 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_8d045016-8932-4293-9f53-71663d354934/kube-state-metrics/0.log" Dec 03 06:52:08 crc kubenswrapper[4810]: I1203 06:52:08.326481 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt_d19e7058-371b-4ac9-811a-949bc24e8b03/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:08 crc kubenswrapper[4810]: I1203 06:52:08.616317 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4/memcached/0.log" Dec 03 06:52:08 crc kubenswrapper[4810]: I1203 06:52:08.696006 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-74c9fd966f-8fc7k_411b48fc-bafd-47a2-8bd6-c31e2132b09f/neutron-api/0.log" Dec 03 06:52:08 crc kubenswrapper[4810]: I1203 06:52:08.713373 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-74c9fd966f-8fc7k_411b48fc-bafd-47a2-8bd6-c31e2132b09f/neutron-httpd/0.log" Dec 03 06:52:08 crc kubenswrapper[4810]: I1203 06:52:08.782718 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl_201a4b8d-5ecb-4cc4-bacb-51d499efb485/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:09 crc kubenswrapper[4810]: I1203 06:52:09.177066 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_51f2670f-1de2-4383-a6a9-ea85a63a7586/nova-cell0-conductor-conductor/0.log" Dec 03 06:52:09 crc kubenswrapper[4810]: I1203 06:52:09.210602 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c/nova-api-log/0.log" Dec 03 06:52:09 crc kubenswrapper[4810]: I1203 06:52:09.308840 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c/nova-api-api/0.log" Dec 03 06:52:09 crc 
kubenswrapper[4810]: I1203 06:52:09.375656 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_70eeea83-764d-4b0f-be6a-74c31a35c455/nova-cell1-conductor-conductor/0.log" Dec 03 06:52:09 crc kubenswrapper[4810]: I1203 06:52:09.480554 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_35ba7fd0-41b1-4669-8cb7-5538b4ef5492/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 06:52:09 crc kubenswrapper[4810]: I1203 06:52:09.584611 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-fmtpt_52125ad1-c593-45bd-b8d0-9a46aa72f614/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:09 crc kubenswrapper[4810]: I1203 06:52:09.733076 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_75e0323d-ca57-4a7e-a883-35da97b7e9d7/nova-metadata-log/0.log" Dec 03 06:52:09 crc kubenswrapper[4810]: I1203 06:52:09.893352 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_788c2be7-daf0-4cb1-9d7e-0f351e348603/nova-scheduler-scheduler/0.log" Dec 03 06:52:09 crc kubenswrapper[4810]: I1203 06:52:09.937033 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8d3f680-ee84-4492-8d18-278d088e1332/mysql-bootstrap/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.149726 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8d3f680-ee84-4492-8d18-278d088e1332/galera/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.178519 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8d3f680-ee84-4492-8d18-278d088e1332/mysql-bootstrap/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.204943 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b0b5ee63-b0f3-4133-a294-69ed680c5374/mysql-bootstrap/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.434913 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b0b5ee63-b0f3-4133-a294-69ed680c5374/galera/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.453921 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_db524b8c-98e9-41bf-be3f-5376226012e4/openstackclient/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.459507 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b0b5ee63-b0f3-4133-a294-69ed680c5374/mysql-bootstrap/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.644598 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4x8tn_0df96f16-d193-4ecc-a624-e721c61a42af/ovn-controller/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.646643 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_75e0323d-ca57-4a7e-a883-35da97b7e9d7/nova-metadata-metadata/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.693584 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-hthr4_366d9c09-ff45-486b-957f-abeba4ccfda0/openstack-network-exporter/0.log" Dec 03 06:52:10 crc kubenswrapper[4810]: I1203 06:52:10.812674 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-qd85b_fe7b8456-b2a9-44b7-b00b-320854a4c571/ovsdb-server-init/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.045413 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qd85b_fe7b8456-b2a9-44b7-b00b-320854a4c571/ovsdb-server-init/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.060659 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-wz77v_ba7b0eac-f456-4a3d-b96a-b44fc348d317/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.068090 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qd85b_fe7b8456-b2a9-44b7-b00b-320854a4c571/ovsdb-server/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.071160 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qd85b_fe7b8456-b2a9-44b7-b00b-320854a4c571/ovs-vswitchd/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.234129 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3f9e5557-2fe8-4d2f-a663-3f015aa61b9e/openstack-network-exporter/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.239393 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3f9e5557-2fe8-4d2f-a663-3f015aa61b9e/ovn-northd/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.269594 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_907b4534-7daf-4a4f-ae5b-65d58194cabf/openstack-network-exporter/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.421865 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_907b4534-7daf-4a4f-ae5b-65d58194cabf/ovsdbserver-nb/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.434325 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b03355df-e435-4db6-8f0a-10a6618f4bfa/openstack-network-exporter/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.480427 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b03355df-e435-4db6-8f0a-10a6618f4bfa/ovsdbserver-sb/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.651962 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67f9f779cb-gh9cv_770e7dec-064e-4641-a94b-78121261d7cd/placement-api/0.log" Dec 03 06:52:11 crc kubenswrapper[4810]: I1203 06:52:11.703469 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67f9f779cb-gh9cv_770e7dec-064e-4641-a94b-78121261d7cd/placement-log/0.log" Dec 03 06:52:12 crc kubenswrapper[4810]: I1203 06:52:12.285337 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b4404434-bf13-4da3-a7df-d5ef032b4b67/setup-container/0.log" Dec 03 06:52:12 crc kubenswrapper[4810]: I1203 06:52:12.508936 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b4404434-bf13-4da3-a7df-d5ef032b4b67/setup-container/0.log" Dec 03 06:52:12 crc kubenswrapper[4810]: I1203 06:52:12.531460 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_743ad4f7-d246-495e-8f32-4ecf10c858bd/setup-container/0.log" Dec 03 06:52:12 crc kubenswrapper[4810]: I1203 06:52:12.580345 4810 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b4404434-bf13-4da3-a7df-d5ef032b4b67/rabbitmq/0.log" Dec 03 06:52:12 crc kubenswrapper[4810]: I1203 06:52:12.858607 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_743ad4f7-d246-495e-8f32-4ecf10c858bd/setup-container/0.log" Dec 03 06:52:12 crc kubenswrapper[4810]: I1203 06:52:12.917843 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_743ad4f7-d246-495e-8f32-4ecf10c858bd/rabbitmq/0.log" Dec 03 06:52:12 crc kubenswrapper[4810]: I1203 06:52:12.947554 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c_841fc3ce-d8a7-4cb2-89ab-31cae73ce18d/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:13 crc kubenswrapper[4810]: I1203 06:52:13.035645 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-45tnh_f7a1032e-664c-477e-93be-b363dce922bb/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:13 crc kubenswrapper[4810]: I1203 06:52:13.139886 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2_64d3f7de-ee70-4197-b7ba-547459e0dfef/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:13 crc kubenswrapper[4810]: I1203 06:52:13.142352 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-c66cp_a2097290-7aae-478a-9bd0-7d8c5a32b4d1/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:13 crc kubenswrapper[4810]: I1203 06:52:13.338858 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-hnbdl_0200a8b5-f03d-494e-9741-987a521ea388/ssh-known-hosts-edpm-deployment/0.log" Dec 03 06:52:13 crc kubenswrapper[4810]: I1203 06:52:13.394117 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-b455b97b9-skz54_61ab11f4-c89b-406d-817d-f652951cf71d/proxy-server/0.log" Dec 03 06:52:13 crc kubenswrapper[4810]: I1203 06:52:13.462757 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-b455b97b9-skz54_61ab11f4-c89b-406d-817d-f652951cf71d/proxy-httpd/0.log" Dec 03 06:52:13 crc kubenswrapper[4810]: I1203 06:52:13.972981 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-7jjx6_d10f8a77-de87-4373-862d-1c5c27744e5a/swift-ring-rebalance/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.018193 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/account-auditor/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.040545 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/account-reaper/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.196529 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/container-auditor/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.205236 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/account-replicator/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.239638 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/container-server/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.427609 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/container-replicator/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.443218 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/account-server/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.558955 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/container-updater/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.583810 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-auditor/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.609817 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-expirer/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.654214 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-replicator/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.673985 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-server/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.751132 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-updater/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.797601 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/swift-recon-cron/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.802243 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/rsync/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.897891 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-9zthf_91e736a7-e1a5-4b7c-9638-71c18367e234/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:14 crc kubenswrapper[4810]: I1203 06:52:14.998046 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f/tempest-tests-tempest-tests-runner/0.log" Dec 03 06:52:15 crc kubenswrapper[4810]: I1203 06:52:15.075377 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_cae965d3-f73d-492c-8f57-f5a9e57c1d53/test-operator-logs-container/0.log" Dec 03 06:52:15 crc kubenswrapper[4810]: I1203 06:52:15.158405 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6_5f7c21ae-9d4b-4783-97da-66a73e29790a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:52:25 crc kubenswrapper[4810]: I1203 06:52:25.677491 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:52:25 crc kubenswrapper[4810]: I1203 06:52:25.677940 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:52:25 crc kubenswrapper[4810]: I1203 06:52:25.677981 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:52:25 crc kubenswrapper[4810]: I1203 06:52:25.678652 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8220f58f4f4a4254b484d5ed1eae034f8f84b205f2bd8961d3fdf0b01709c778"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:52:25 crc kubenswrapper[4810]: I1203 06:52:25.678699 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://8220f58f4f4a4254b484d5ed1eae034f8f84b205f2bd8961d3fdf0b01709c778" gracePeriod=600 Dec 03 06:52:26 crc kubenswrapper[4810]: I1203 06:52:26.461448 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="8220f58f4f4a4254b484d5ed1eae034f8f84b205f2bd8961d3fdf0b01709c778" exitCode=0 Dec 03 06:52:26 crc kubenswrapper[4810]: I1203 06:52:26.461508 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"8220f58f4f4a4254b484d5ed1eae034f8f84b205f2bd8961d3fdf0b01709c778"} Dec 03 06:52:26 crc kubenswrapper[4810]: I1203 06:52:26.462180 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38"} Dec 03 06:52:26 crc kubenswrapper[4810]: I1203 06:52:26.462219 4810 scope.go:117] "RemoveContainer" containerID="464beccc7058dd7b4d8805aab6a73e27043282dfe2b47d0c6adadd69cfae7aa2" Dec 03 06:52:30 crc kubenswrapper[4810]: I1203 06:52:30.514718 4810 scope.go:117] "RemoveContainer" containerID="8aa61b8b2b2cb4ee9dc8d50bbe76d8c6d266577e44b92c66121282fcb99e041b" Dec 03 06:52:30 crc kubenswrapper[4810]: I1203 06:52:30.556672 4810 scope.go:117] "RemoveContainer" containerID="7c40b8b21f1d3fbb01036246c95e7727e5174b02ea6ea5ff2efe1f6280f30e9f" Dec 03 06:52:30 crc kubenswrapper[4810]: I1203 06:52:30.608361 4810 scope.go:117] "RemoveContainer" containerID="466ffbba40be0eb08a8244316611a29a0eac6ea5052b94f5565fe3a709345291" Dec 03 06:52:39 crc kubenswrapper[4810]: I1203 06:52:39.717536 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/util/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.250167 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/util/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.259044 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/pull/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.288638 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/pull/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.459275 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/extract/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.469061 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/util/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.506020 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/pull/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.665910 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-k8whl_93016dff-dd26-4447-bb03-244d51ba4154/kube-rbac-proxy/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.716158 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wbdfg_4b9517ac-6af4-40eb-a049-7b778dcc5f10/kube-rbac-proxy/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.726100 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-k8whl_93016dff-dd26-4447-bb03-244d51ba4154/manager/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.918468 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wbdfg_4b9517ac-6af4-40eb-a049-7b778dcc5f10/manager/0.log" Dec 03 06:52:40 crc kubenswrapper[4810]: I1203 06:52:40.921135 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-mpgcv_7cf67e34-abd6-4424-95f4-7654ac840108/kube-rbac-proxy/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.064237 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-mpgcv_7cf67e34-abd6-4424-95f4-7654ac840108/manager/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.291180 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-h9f8m_af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d/kube-rbac-proxy/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.308287 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-h9f8m_af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d/manager/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 
06:52:41.472571 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-64665_101772d6-6540-4695-a13f-ab0ce9a4bff2/kube-rbac-proxy/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.485236 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-64665_101772d6-6540-4695-a13f-ab0ce9a4bff2/manager/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.619657 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-zclnr_7563ba12-e36d-48b2-8d43-57435fe85d0e/kube-rbac-proxy/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.648474 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-zclnr_7563ba12-e36d-48b2-8d43-57435fe85d0e/manager/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.752767 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-fdct7_4517c669-2df4-40be-bcc1-0b44fa11838d/kube-rbac-proxy/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.928721 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-pds6l_3de263f5-25e9-41a0-a51d-37317cb65b16/manager/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.972748 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-pds6l_3de263f5-25e9-41a0-a51d-37317cb65b16/kube-rbac-proxy/0.log" Dec 03 06:52:41 crc kubenswrapper[4810]: I1203 06:52:41.985923 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-fdct7_4517c669-2df4-40be-bcc1-0b44fa11838d/manager/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.165888 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b9hqh_b36e1f29-d534-4c72-bcac-74ffc356c086/kube-rbac-proxy/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.232915 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b9hqh_b36e1f29-d534-4c72-bcac-74ffc356c086/manager/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.275109 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-pzmcl_60612556-0f2a-4999-afb7-d71b32d18ef0/kube-rbac-proxy/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.368890 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-pzmcl_60612556-0f2a-4999-afb7-d71b32d18ef0/manager/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.439864 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-km59q_68f5962b-45be-45a4-9822-eb23088d3d79/kube-rbac-proxy/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.490534 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-km59q_68f5962b-45be-45a4-9822-eb23088d3d79/manager/0.log" Dec 03 06:52:42 crc 
kubenswrapper[4810]: I1203 06:52:42.626979 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-vs4m6_ac2ef7b5-018c-4775-8e14-106265e1c300/kube-rbac-proxy/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.719849 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-vs4m6_ac2ef7b5-018c-4775-8e14-106265e1c300/manager/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.826032 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-5lkc2_fa52e238-d025-4845-85bb-2787a7eb2ed7/kube-rbac-proxy/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.909660 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-5lkc2_fa52e238-d025-4845-85bb-2787a7eb2ed7/manager/0.log" Dec 03 06:52:42 crc kubenswrapper[4810]: I1203 06:52:42.998943 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-z6tqk_660ec0b8-77cd-4cb2-9597-abca0770fbf9/kube-rbac-proxy/0.log" Dec 03 06:52:43 crc kubenswrapper[4810]: I1203 06:52:43.044209 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-z6tqk_660ec0b8-77cd-4cb2-9597-abca0770fbf9/manager/0.log" Dec 03 06:52:43 crc kubenswrapper[4810]: I1203 06:52:43.159910 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55d86b6686v7m42_7306b11b-b539-4542-af3f-a738880af67f/kube-rbac-proxy/0.log" Dec 03 06:52:43 crc kubenswrapper[4810]: I1203 06:52:43.222563 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55d86b6686v7m42_7306b11b-b539-4542-af3f-a738880af67f/manager/0.log" Dec 03 06:52:43 crc kubenswrapper[4810]: I1203 06:52:43.728403 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-mmn6k_6a2591e5-764f-4e99-90d5-c0942ee5c434/registry-server/0.log" Dec 03 06:52:43 crc kubenswrapper[4810]: I1203 06:52:43.916661 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dd5c7bb7c-9ck7g_ce9634a4-b14f-4972-a2f8-3bcea4db7a43/operator/0.log" Dec 03 06:52:43 crc kubenswrapper[4810]: I1203 06:52:43.955320 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-lvc9h_70b45fee-d617-41b2-a598-eae2815e19c6/kube-rbac-proxy/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.048495 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-lvc9h_70b45fee-d617-41b2-a598-eae2815e19c6/manager/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.127017 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-2hkc4_4a806c3b-f888-4612-b979-9f57fa2adabe/kube-rbac-proxy/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.160046 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-2hkc4_4a806c3b-f888-4612-b979-9f57fa2adabe/manager/0.log" 
Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.392129 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-7nw2d_2f985dd7-de9f-498f-a297-f0602a4888a4/operator/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.394976 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-r42sr_f8e032e8-8552-4d00-861c-798b4e59b83e/kube-rbac-proxy/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.541966 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-9f56fc979-kq5vk_a67e7123-c5dc-4392-9296-02892458e969/manager/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.567540 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-r42sr_f8e032e8-8552-4d00-861c-798b4e59b83e/manager/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.584429 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-2ktb9_13befddb-d7f2-48bb-9d8c-8e61fbd8601a/kube-rbac-proxy/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.658993 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-2ktb9_13befddb-d7f2-48bb-9d8c-8e61fbd8601a/manager/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.750707 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-md8gl_799ba5c1-1eae-4a8a-9177-454e5bcba2a5/kube-rbac-proxy/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.808012 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-md8gl_799ba5c1-1eae-4a8a-9177-454e5bcba2a5/manager/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.882631 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-hw6bt_5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462/kube-rbac-proxy/0.log" Dec 03 06:52:44 crc kubenswrapper[4810]: I1203 06:52:44.927107 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-hw6bt_5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462/manager/0.log" Dec 03 06:53:07 crc kubenswrapper[4810]: I1203 06:53:07.395669 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fvwpx_fe9221b2-2ee0-4fa7-846f-f37559bf8631/kube-rbac-proxy/0.log" Dec 03 06:53:07 crc kubenswrapper[4810]: I1203 06:53:07.405956 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-wgxmb_e11bccd1-94c0-4366-9420-6295008b823d/control-plane-machine-set-operator/0.log" Dec 03 06:53:07 crc kubenswrapper[4810]: I1203 06:53:07.541905 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fvwpx_fe9221b2-2ee0-4fa7-846f-f37559bf8631/machine-api-operator/0.log" Dec 03 06:53:20 crc kubenswrapper[4810]: I1203 06:53:20.873394 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-sb4vh_30155c9d-effd-4dd4-8893-afcf98aa730d/cert-manager-controller/0.log" Dec 03 06:53:21 crc kubenswrapper[4810]: I1203 06:53:21.047599 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-9d7bx_d895f771-5889-476b-9f46-1a2930561552/cert-manager-cainjector/0.log" Dec 03 06:53:21 crc kubenswrapper[4810]: I1203 06:53:21.125538 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-cg57k_5bac17b1-b4d3-423f-8303-219d40d0c765/cert-manager-webhook/0.log" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.533294 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pzqg7"] Dec 03 06:53:34 crc kubenswrapper[4810]: E1203 06:53:34.534089 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ab3b408-6d76-4754-847a-35d5e82d8972" containerName="container-00" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.534100 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ab3b408-6d76-4754-847a-35d5e82d8972" containerName="container-00" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.534290 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ab3b408-6d76-4754-847a-35d5e82d8972" containerName="container-00" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.535579 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.572410 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzqg7"] Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.684450 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-utilities\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.684545 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfcfg\" (UniqueName: \"kubernetes.io/projected/b59c12d3-186f-465f-a77c-a1714f507af9-kube-api-access-hfcfg\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.684600 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-catalog-content\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.786178 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfcfg\" (UniqueName: \"kubernetes.io/projected/b59c12d3-186f-465f-a77c-a1714f507af9-kube-api-access-hfcfg\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.786266 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-catalog-content\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.786350 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-utilities\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.786911 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-utilities\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.786977 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-catalog-content\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.808515 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfcfg\" (UniqueName: \"kubernetes.io/projected/b59c12d3-186f-465f-a77c-a1714f507af9-kube-api-access-hfcfg\") pod \"redhat-marketplace-pzqg7\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:34 crc kubenswrapper[4810]: I1203 06:53:34.878136 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:35 crc kubenswrapper[4810]: I1203 06:53:35.445855 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzqg7"] Dec 03 06:53:35 crc kubenswrapper[4810]: I1203 06:53:35.855704 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-6drbj_aeb379d5-f663-4858-a0b5-27614ecf83e9/nmstate-console-plugin/0.log" Dec 03 06:53:35 crc kubenswrapper[4810]: I1203 06:53:35.871064 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-wvjvm_a70a2c16-7c9a-45aa-b91d-7d6f44b821c6/nmstate-handler/0.log" Dec 03 06:53:36 crc kubenswrapper[4810]: I1203 06:53:36.033863 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ln57t_430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b/nmstate-metrics/0.log" Dec 03 06:53:36 crc kubenswrapper[4810]: I1203 06:53:36.047593 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ln57t_430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b/kube-rbac-proxy/0.log" Dec 03 06:53:36 crc kubenswrapper[4810]: I1203 06:53:36.175302 4810 generic.go:334] "Generic (PLEG): container finished" podID="b59c12d3-186f-465f-a77c-a1714f507af9" containerID="4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd" exitCode=0 Dec 03 06:53:36 crc kubenswrapper[4810]: I1203 06:53:36.175347 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzqg7" event={"ID":"b59c12d3-186f-465f-a77c-a1714f507af9","Type":"ContainerDied","Data":"4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd"} Dec 03 06:53:36 crc kubenswrapper[4810]: I1203 06:53:36.175375 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzqg7" event={"ID":"b59c12d3-186f-465f-a77c-a1714f507af9","Type":"ContainerStarted","Data":"b95a5bdd6c4edf6433f3432f9a2fd1765e0f449dbc5fc5d8ca452a3bb2b8d4b4"} Dec 03 06:53:36 crc kubenswrapper[4810]: I1203 06:53:36.279055 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-sqp7g_283887bd-09bf-4f88-81f4-efb5ade1b9de/nmstate-webhook/0.log" Dec 03 06:53:36 crc kubenswrapper[4810]: I1203 06:53:36.337651 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-ppp7c_0e12845f-64f7-48a4-8297-25f3eef40777/nmstate-operator/0.log" Dec 03 06:53:39 crc kubenswrapper[4810]: I1203 06:53:39.208219 4810 generic.go:334] "Generic (PLEG): container finished" podID="b59c12d3-186f-465f-a77c-a1714f507af9" containerID="ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266" exitCode=0 Dec 03 06:53:39 crc kubenswrapper[4810]: I1203 06:53:39.208291 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzqg7" event={"ID":"b59c12d3-186f-465f-a77c-a1714f507af9","Type":"ContainerDied","Data":"ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266"} Dec 03 06:53:40 crc kubenswrapper[4810]: I1203 06:53:40.223468 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzqg7" event={"ID":"b59c12d3-186f-465f-a77c-a1714f507af9","Type":"ContainerStarted","Data":"ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee"} Dec 03 06:53:41 crc kubenswrapper[4810]: I1203 06:53:41.261707 4810 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pzqg7" podStartSLOduration=3.68488215 podStartE2EDuration="7.26168356s" podCreationTimestamp="2025-12-03 06:53:34 +0000 UTC" firstStartedPulling="2025-12-03 06:53:36.179118522 +0000 UTC m=+4340.114579363" lastFinishedPulling="2025-12-03 06:53:39.755919922 +0000 UTC m=+4343.691380773" observedRunningTime="2025-12-03 06:53:41.255457106 +0000 UTC m=+4345.190917957" watchObservedRunningTime="2025-12-03 06:53:41.26168356 +0000 UTC m=+4345.197144421" Dec 03 06:53:44 crc kubenswrapper[4810]: I1203 06:53:44.879342 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:44 crc kubenswrapper[4810]: I1203 06:53:44.879917 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:44 crc kubenswrapper[4810]: I1203 06:53:44.942900 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:45 crc kubenswrapper[4810]: I1203 06:53:45.308222 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:45 crc kubenswrapper[4810]: I1203 06:53:45.516065 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzqg7"] Dec 03 06:53:47 crc kubenswrapper[4810]: I1203 06:53:47.280547 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pzqg7" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" containerName="registry-server" containerID="cri-o://ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee" gracePeriod=2 Dec 03 06:53:47 crc kubenswrapper[4810]: I1203 06:53:47.998722 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.184409 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-utilities\") pod \"b59c12d3-186f-465f-a77c-a1714f507af9\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.185015 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-utilities" (OuterVolumeSpecName: "utilities") pod "b59c12d3-186f-465f-a77c-a1714f507af9" (UID: "b59c12d3-186f-465f-a77c-a1714f507af9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.185245 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-catalog-content\") pod \"b59c12d3-186f-465f-a77c-a1714f507af9\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.185509 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfcfg\" (UniqueName: \"kubernetes.io/projected/b59c12d3-186f-465f-a77c-a1714f507af9-kube-api-access-hfcfg\") pod \"b59c12d3-186f-465f-a77c-a1714f507af9\" (UID: \"b59c12d3-186f-465f-a77c-a1714f507af9\") " Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.186354 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.190487 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b59c12d3-186f-465f-a77c-a1714f507af9-kube-api-access-hfcfg" (OuterVolumeSpecName: "kube-api-access-hfcfg") pod "b59c12d3-186f-465f-a77c-a1714f507af9" (UID: "b59c12d3-186f-465f-a77c-a1714f507af9"). InnerVolumeSpecName "kube-api-access-hfcfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.200634 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b59c12d3-186f-465f-a77c-a1714f507af9" (UID: "b59c12d3-186f-465f-a77c-a1714f507af9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.289052 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59c12d3-186f-465f-a77c-a1714f507af9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.289090 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfcfg\" (UniqueName: \"kubernetes.io/projected/b59c12d3-186f-465f-a77c-a1714f507af9-kube-api-access-hfcfg\") on node \"crc\" DevicePath \"\"" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.290965 4810 generic.go:334] "Generic (PLEG): container finished" podID="b59c12d3-186f-465f-a77c-a1714f507af9" containerID="ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee" exitCode=0 Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.291008 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzqg7" event={"ID":"b59c12d3-186f-465f-a77c-a1714f507af9","Type":"ContainerDied","Data":"ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee"} Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.291040 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzqg7" event={"ID":"b59c12d3-186f-465f-a77c-a1714f507af9","Type":"ContainerDied","Data":"b95a5bdd6c4edf6433f3432f9a2fd1765e0f449dbc5fc5d8ca452a3bb2b8d4b4"} Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.291062 4810 scope.go:117] "RemoveContainer" containerID="ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.291292 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzqg7" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.313454 4810 scope.go:117] "RemoveContainer" containerID="ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.333661 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzqg7"] Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.347771 4810 scope.go:117] "RemoveContainer" containerID="4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.348698 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzqg7"] Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.386056 4810 scope.go:117] "RemoveContainer" containerID="ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee" Dec 03 06:53:48 crc kubenswrapper[4810]: E1203 06:53:48.386548 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee\": container with ID starting with ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee not found: ID does not exist" containerID="ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.386587 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee"} err="failed to get container status \"ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee\": rpc error: code = NotFound desc = could not find container \"ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee\": container with ID starting with ed8d6a3c776939657a2355bcfa249184c918179eca3117726d73525597dadbee not found: ID does not exist" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.386610 4810 scope.go:117] "RemoveContainer" containerID="ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266" Dec 03 06:53:48 crc kubenswrapper[4810]: E1203 06:53:48.386853 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266\": container with ID starting with ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266 not found: ID does not exist" containerID="ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.386899 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266"} err="failed to get container status \"ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266\": rpc error: code = NotFound desc = could not find container \"ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266\": container with ID starting with ca40fb5acdfc1c4c32d21176e28e01f1caa433fb809ebe48570e2067d2f17266 not found: ID does not exist" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.387040 4810 scope.go:117] "RemoveContainer" containerID="4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd" Dec 03 06:53:48 crc kubenswrapper[4810]: E1203 06:53:48.387330 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd\": container with ID starting with 4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd not found: ID does not exist" containerID="4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.387355 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd"} err="failed to get container status \"4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd\": rpc error: code = NotFound desc = could not find container \"4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd\": container with ID starting with 4e004f32abaacf238d18a64375da77da7ef7bebc05fc0d51f49dea9c54f214cd not found: ID does not exist" Dec 03 06:53:48 crc kubenswrapper[4810]: I1203 06:53:48.389683 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" path="/var/lib/kubelet/pods/b59c12d3-186f-465f-a77c-a1714f507af9/volumes" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.165357 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-6nc6q_edef3e76-a314-4543-9a0b-592d76cbed2a/kube-rbac-proxy/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.217725 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-6nc6q_edef3e76-a314-4543-9a0b-592d76cbed2a/controller/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.347874 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-frr-files/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.504406 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-frr-files/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.523163 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-metrics/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.541871 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-reloader/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.566357 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-reloader/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.732451 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-frr-files/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.764138 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-reloader/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.780310 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-metrics/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.780541 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-metrics/0.log" 
Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.907279 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-frr-files/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.925695 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-reloader/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.950367 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-metrics/0.log" Dec 03 06:53:53 crc kubenswrapper[4810]: I1203 06:53:53.989070 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/controller/0.log" Dec 03 06:53:54 crc kubenswrapper[4810]: I1203 06:53:54.104149 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/frr-metrics/0.log" Dec 03 06:53:54 crc kubenswrapper[4810]: I1203 06:53:54.119974 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/kube-rbac-proxy/0.log" Dec 03 06:53:54 crc kubenswrapper[4810]: I1203 06:53:54.173348 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/kube-rbac-proxy-frr/0.log" Dec 03 06:53:54 crc kubenswrapper[4810]: I1203 06:53:54.333341 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/reloader/0.log" Dec 03 06:53:54 crc kubenswrapper[4810]: I1203 06:53:54.385559 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-5j7rx_861b1b72-05ca-4e91-a015-64939a072ec2/frr-k8s-webhook-server/0.log" Dec 03 06:53:54 crc kubenswrapper[4810]: I1203 06:53:54.631751 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-579cbb8d94-dxs7n_43572253-c54e-4ba2-b805-7a9521a015cd/manager/0.log" Dec 03 06:53:54 crc kubenswrapper[4810]: I1203 06:53:54.743918 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6dfb784c-hjvnl_5406f1e2-94a8-4c3c-b154-e1448775314a/webhook-server/0.log" Dec 03 06:53:54 crc kubenswrapper[4810]: I1203 06:53:54.817217 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hpcxn_1bf9e275-7fc1-43d1-b95a-b19e459fda0c/kube-rbac-proxy/0.log" Dec 03 06:53:55 crc kubenswrapper[4810]: I1203 06:53:55.409282 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hpcxn_1bf9e275-7fc1-43d1-b95a-b19e459fda0c/speaker/0.log" Dec 03 06:53:55 crc kubenswrapper[4810]: I1203 06:53:55.538786 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/frr/0.log" Dec 03 06:54:08 crc kubenswrapper[4810]: I1203 06:54:08.806575 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/util/0.log" Dec 03 06:54:08 crc kubenswrapper[4810]: I1203 06:54:08.950485 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/util/0.log" Dec 03 06:54:08 crc kubenswrapper[4810]: I1203 06:54:08.968424 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/pull/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.019581 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/pull/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.142926 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/util/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.161894 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/extract/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.167264 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/pull/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.279029 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/util/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.441589 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/pull/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.452824 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/pull/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.477265 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/util/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.628826 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/util/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.646412 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/pull/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.693189 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/extract/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.825844 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-utilities/0.log" Dec 03 
06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.941844 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-utilities/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.966169 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-content/0.log" Dec 03 06:54:09 crc kubenswrapper[4810]: I1203 06:54:09.998021 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-content/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.145490 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-content/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.151594 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-utilities/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.314284 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-utilities/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.516644 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-utilities/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.607479 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-content/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.629391 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-content/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.728375 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/registry-server/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.772934 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-utilities/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.803688 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-content/0.log" Dec 03 06:54:10 crc kubenswrapper[4810]: I1203 06:54:10.954281 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-xwgs5_1de26693-7bac-4062-8ed2-d7f84510be17/marketplace-operator/0.log" Dec 03 06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.208451 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-utilities/0.log" Dec 03 06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.397285 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-content/0.log" Dec 03 
06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.427506 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-utilities/0.log" Dec 03 06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.432823 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/registry-server/0.log" Dec 03 06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.471775 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-content/0.log" Dec 03 06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.632288 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-content/0.log" Dec 03 06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.665832 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-utilities/0.log" Dec 03 06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.901134 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-utilities/0.log" Dec 03 06:54:11 crc kubenswrapper[4810]: I1203 06:54:11.937886 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/registry-server/0.log" Dec 03 06:54:12 crc kubenswrapper[4810]: I1203 06:54:12.073441 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-content/0.log" Dec 03 06:54:12 crc kubenswrapper[4810]: I1203 06:54:12.094568 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-utilities/0.log" Dec 03 06:54:12 crc kubenswrapper[4810]: I1203 06:54:12.124911 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-content/0.log" Dec 03 06:54:12 crc kubenswrapper[4810]: I1203 06:54:12.241223 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-content/0.log" Dec 03 06:54:12 crc kubenswrapper[4810]: I1203 06:54:12.275908 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-utilities/0.log" Dec 03 06:54:12 crc kubenswrapper[4810]: I1203 06:54:12.931236 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/registry-server/0.log" Dec 03 06:54:55 crc kubenswrapper[4810]: I1203 06:54:55.677092 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:54:55 crc kubenswrapper[4810]: I1203 06:54:55.677669 4810 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:55:25 crc kubenswrapper[4810]: I1203 06:55:25.677974 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:55:25 crc kubenswrapper[4810]: I1203 06:55:25.678856 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:55:52 crc kubenswrapper[4810]: I1203 06:55:52.300984 4810 generic.go:334] "Generic (PLEG): container finished" podID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerID="482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487" exitCode=0 Dec 03 06:55:52 crc kubenswrapper[4810]: I1203 06:55:52.301103 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xt4vr/must-gather-g4blm" event={"ID":"101e3b8d-2028-41c8-af69-3f7fbfb31209","Type":"ContainerDied","Data":"482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487"} Dec 03 06:55:52 crc kubenswrapper[4810]: I1203 06:55:52.302157 4810 scope.go:117] "RemoveContainer" containerID="482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487" Dec 03 06:55:53 crc kubenswrapper[4810]: I1203 06:55:53.129309 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xt4vr_must-gather-g4blm_101e3b8d-2028-41c8-af69-3f7fbfb31209/gather/0.log" Dec 03 06:55:55 crc kubenswrapper[4810]: I1203 06:55:55.677171 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 06:55:55 crc kubenswrapper[4810]: I1203 06:55:55.677532 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 06:55:55 crc kubenswrapper[4810]: I1203 06:55:55.677596 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 06:55:55 crc kubenswrapper[4810]: I1203 06:55:55.678678 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 06:55:55 crc kubenswrapper[4810]: I1203 06:55:55.678812 4810 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" gracePeriod=600 Dec 03 06:55:55 crc kubenswrapper[4810]: E1203 06:55:55.810380 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:55:56 crc kubenswrapper[4810]: I1203 06:55:56.349359 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" exitCode=0 Dec 03 06:55:56 crc kubenswrapper[4810]: I1203 06:55:56.349405 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38"} Dec 03 06:55:56 crc kubenswrapper[4810]: I1203 06:55:56.349710 4810 scope.go:117] "RemoveContainer" containerID="8220f58f4f4a4254b484d5ed1eae034f8f84b205f2bd8961d3fdf0b01709c778" Dec 03 06:55:56 crc kubenswrapper[4810]: I1203 06:55:56.350298 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:55:56 crc kubenswrapper[4810]: E1203 06:55:56.350548 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:56:00 crc kubenswrapper[4810]: I1203 06:56:00.757714 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xt4vr/must-gather-g4blm"] Dec 03 06:56:00 crc kubenswrapper[4810]: I1203 06:56:00.760250 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-xt4vr/must-gather-g4blm" podUID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerName="copy" containerID="cri-o://feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860" gracePeriod=2 Dec 03 06:56:00 crc kubenswrapper[4810]: I1203 06:56:00.769628 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xt4vr/must-gather-g4blm"] Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.251268 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xt4vr_must-gather-g4blm_101e3b8d-2028-41c8-af69-3f7fbfb31209/copy/0.log" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.252299 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.367492 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grvr2\" (UniqueName: \"kubernetes.io/projected/101e3b8d-2028-41c8-af69-3f7fbfb31209-kube-api-access-grvr2\") pod \"101e3b8d-2028-41c8-af69-3f7fbfb31209\" (UID: \"101e3b8d-2028-41c8-af69-3f7fbfb31209\") " Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.367668 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/101e3b8d-2028-41c8-af69-3f7fbfb31209-must-gather-output\") pod \"101e3b8d-2028-41c8-af69-3f7fbfb31209\" (UID: \"101e3b8d-2028-41c8-af69-3f7fbfb31209\") " Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.376142 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/101e3b8d-2028-41c8-af69-3f7fbfb31209-kube-api-access-grvr2" (OuterVolumeSpecName: "kube-api-access-grvr2") pod "101e3b8d-2028-41c8-af69-3f7fbfb31209" (UID: "101e3b8d-2028-41c8-af69-3f7fbfb31209"). InnerVolumeSpecName "kube-api-access-grvr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.409861 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xt4vr_must-gather-g4blm_101e3b8d-2028-41c8-af69-3f7fbfb31209/copy/0.log" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.410392 4810 generic.go:334] "Generic (PLEG): container finished" podID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerID="feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860" exitCode=143 Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.410447 4810 scope.go:117] "RemoveContainer" containerID="feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.410482 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xt4vr/must-gather-g4blm" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.458889 4810 scope.go:117] "RemoveContainer" containerID="482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.470249 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grvr2\" (UniqueName: \"kubernetes.io/projected/101e3b8d-2028-41c8-af69-3f7fbfb31209-kube-api-access-grvr2\") on node \"crc\" DevicePath \"\"" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.509865 4810 scope.go:117] "RemoveContainer" containerID="feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860" Dec 03 06:56:01 crc kubenswrapper[4810]: E1203 06:56:01.510465 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860\": container with ID starting with feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860 not found: ID does not exist" containerID="feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.510548 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860"} err="failed to get container status \"feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860\": rpc error: code = NotFound desc = could not find container \"feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860\": container with ID starting with feed745f37f6d57c4696ad45c89fa14c57b2313378d49d79d44d29f940972860 not found: ID does not exist" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.510634 4810 scope.go:117] "RemoveContainer" containerID="482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487" Dec 03 06:56:01 crc kubenswrapper[4810]: E1203 06:56:01.511187 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487\": container with ID starting with 482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487 not found: ID does not exist" containerID="482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.511215 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487"} err="failed to get container status \"482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487\": rpc error: code = NotFound desc = could not find container \"482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487\": container with ID starting with 482f7036d3d369f5e21f34f2c80e9cecf13324304241c805bd6e20a7fe883487 not found: ID does not exist" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.521030 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/101e3b8d-2028-41c8-af69-3f7fbfb31209-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "101e3b8d-2028-41c8-af69-3f7fbfb31209" (UID: "101e3b8d-2028-41c8-af69-3f7fbfb31209"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:56:01 crc kubenswrapper[4810]: I1203 06:56:01.571588 4810 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/101e3b8d-2028-41c8-af69-3f7fbfb31209-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 06:56:02 crc kubenswrapper[4810]: I1203 06:56:02.400380 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="101e3b8d-2028-41c8-af69-3f7fbfb31209" path="/var/lib/kubelet/pods/101e3b8d-2028-41c8-af69-3f7fbfb31209/volumes" Dec 03 06:56:07 crc kubenswrapper[4810]: I1203 06:56:07.378374 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:56:07 crc kubenswrapper[4810]: E1203 06:56:07.379311 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.377900 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:56:22 crc kubenswrapper[4810]: E1203 06:56:22.378797 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.661569 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6s84h"] Dec 03 06:56:22 crc kubenswrapper[4810]: E1203 06:56:22.662052 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" containerName="registry-server" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.662073 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" containerName="registry-server" Dec 03 06:56:22 crc kubenswrapper[4810]: E1203 06:56:22.662099 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" containerName="extract-content" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.662108 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" containerName="extract-content" Dec 03 06:56:22 crc kubenswrapper[4810]: E1203 06:56:22.662127 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerName="gather" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.662135 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerName="gather" Dec 03 06:56:22 crc kubenswrapper[4810]: E1203 06:56:22.662148 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" containerName="extract-utilities" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.662156 4810 
state_mem.go:107] "Deleted CPUSet assignment" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" containerName="extract-utilities" Dec 03 06:56:22 crc kubenswrapper[4810]: E1203 06:56:22.662171 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerName="copy" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.662179 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerName="copy" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.662408 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerName="copy" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.662431 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="101e3b8d-2028-41c8-af69-3f7fbfb31209" containerName="gather" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.662450 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="b59c12d3-186f-465f-a77c-a1714f507af9" containerName="registry-server" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.664105 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.693203 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6s84h"] Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.714059 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5phgl\" (UniqueName: \"kubernetes.io/projected/0d441683-ddfb-4ea1-8a5f-c5870321b935-kube-api-access-5phgl\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.714123 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-utilities\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.714154 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-catalog-content\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.816218 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5phgl\" (UniqueName: \"kubernetes.io/projected/0d441683-ddfb-4ea1-8a5f-c5870321b935-kube-api-access-5phgl\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.816388 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-utilities\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 
06:56:22.816466 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-catalog-content\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.817397 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-utilities\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.817458 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-catalog-content\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:22 crc kubenswrapper[4810]: I1203 06:56:22.840640 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5phgl\" (UniqueName: \"kubernetes.io/projected/0d441683-ddfb-4ea1-8a5f-c5870321b935-kube-api-access-5phgl\") pod \"community-operators-6s84h\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:23 crc kubenswrapper[4810]: I1203 06:56:23.024851 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:23 crc kubenswrapper[4810]: I1203 06:56:23.507715 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6s84h"] Dec 03 06:56:23 crc kubenswrapper[4810]: W1203 06:56:23.511499 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d441683_ddfb_4ea1_8a5f_c5870321b935.slice/crio-661d56cd2cd01e9e152d042adb30320eca8a46699c22e36784486ae6f8a3a23d WatchSource:0}: Error finding container 661d56cd2cd01e9e152d042adb30320eca8a46699c22e36784486ae6f8a3a23d: Status 404 returned error can't find the container with id 661d56cd2cd01e9e152d042adb30320eca8a46699c22e36784486ae6f8a3a23d Dec 03 06:56:23 crc kubenswrapper[4810]: I1203 06:56:23.622305 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6s84h" event={"ID":"0d441683-ddfb-4ea1-8a5f-c5870321b935","Type":"ContainerStarted","Data":"661d56cd2cd01e9e152d042adb30320eca8a46699c22e36784486ae6f8a3a23d"} Dec 03 06:56:24 crc kubenswrapper[4810]: I1203 06:56:24.635223 4810 generic.go:334] "Generic (PLEG): container finished" podID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerID="b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193" exitCode=0 Dec 03 06:56:24 crc kubenswrapper[4810]: I1203 06:56:24.635533 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6s84h" event={"ID":"0d441683-ddfb-4ea1-8a5f-c5870321b935","Type":"ContainerDied","Data":"b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193"} Dec 03 06:56:24 crc kubenswrapper[4810]: I1203 06:56:24.638320 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 06:56:25 crc kubenswrapper[4810]: I1203 06:56:25.645647 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6s84h" event={"ID":"0d441683-ddfb-4ea1-8a5f-c5870321b935","Type":"ContainerStarted","Data":"567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17"} Dec 03 06:56:26 crc kubenswrapper[4810]: I1203 06:56:26.656691 4810 generic.go:334] "Generic (PLEG): container finished" podID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerID="567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17" exitCode=0 Dec 03 06:56:26 crc kubenswrapper[4810]: I1203 06:56:26.656981 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6s84h" event={"ID":"0d441683-ddfb-4ea1-8a5f-c5870321b935","Type":"ContainerDied","Data":"567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17"} Dec 03 06:56:27 crc kubenswrapper[4810]: I1203 06:56:27.670229 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6s84h" event={"ID":"0d441683-ddfb-4ea1-8a5f-c5870321b935","Type":"ContainerStarted","Data":"13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4"} Dec 03 06:56:27 crc kubenswrapper[4810]: I1203 06:56:27.696025 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6s84h" podStartSLOduration=3.075783953 podStartE2EDuration="5.695992762s" podCreationTimestamp="2025-12-03 06:56:22 +0000 UTC" firstStartedPulling="2025-12-03 06:56:24.637812255 +0000 UTC m=+4508.573273106" lastFinishedPulling="2025-12-03 06:56:27.258021054 +0000 UTC m=+4511.193481915" observedRunningTime="2025-12-03 06:56:27.694625916 +0000 UTC m=+4511.630086767" watchObservedRunningTime="2025-12-03 06:56:27.695992762 +0000 UTC m=+4511.631453653" Dec 03 06:56:33 crc kubenswrapper[4810]: I1203 06:56:33.026054 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:33 crc kubenswrapper[4810]: I1203 06:56:33.026841 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:33 crc kubenswrapper[4810]: I1203 06:56:33.105437 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:33 crc kubenswrapper[4810]: I1203 06:56:33.773264 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:33 crc kubenswrapper[4810]: I1203 06:56:33.828757 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6s84h"] Dec 03 06:56:35 crc kubenswrapper[4810]: I1203 06:56:35.377309 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:56:35 crc kubenswrapper[4810]: E1203 06:56:35.378451 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:56:35 crc kubenswrapper[4810]: I1203 06:56:35.742463 4810 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-6s84h" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerName="registry-server" containerID="cri-o://13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4" gracePeriod=2 Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.242538 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.377869 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-utilities\") pod \"0d441683-ddfb-4ea1-8a5f-c5870321b935\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.378080 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-catalog-content\") pod \"0d441683-ddfb-4ea1-8a5f-c5870321b935\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.378269 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5phgl\" (UniqueName: \"kubernetes.io/projected/0d441683-ddfb-4ea1-8a5f-c5870321b935-kube-api-access-5phgl\") pod \"0d441683-ddfb-4ea1-8a5f-c5870321b935\" (UID: \"0d441683-ddfb-4ea1-8a5f-c5870321b935\") " Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.379620 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-utilities" (OuterVolumeSpecName: "utilities") pod "0d441683-ddfb-4ea1-8a5f-c5870321b935" (UID: "0d441683-ddfb-4ea1-8a5f-c5870321b935"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.385030 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d441683-ddfb-4ea1-8a5f-c5870321b935-kube-api-access-5phgl" (OuterVolumeSpecName: "kube-api-access-5phgl") pod "0d441683-ddfb-4ea1-8a5f-c5870321b935" (UID: "0d441683-ddfb-4ea1-8a5f-c5870321b935"). InnerVolumeSpecName "kube-api-access-5phgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.445352 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d441683-ddfb-4ea1-8a5f-c5870321b935" (UID: "0d441683-ddfb-4ea1-8a5f-c5870321b935"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.480625 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5phgl\" (UniqueName: \"kubernetes.io/projected/0d441683-ddfb-4ea1-8a5f-c5870321b935-kube-api-access-5phgl\") on node \"crc\" DevicePath \"\"" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.480686 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.480708 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d441683-ddfb-4ea1-8a5f-c5870321b935-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.752803 4810 generic.go:334] "Generic (PLEG): container finished" podID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerID="13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4" exitCode=0 Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.752844 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6s84h" event={"ID":"0d441683-ddfb-4ea1-8a5f-c5870321b935","Type":"ContainerDied","Data":"13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4"} Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.752875 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6s84h" event={"ID":"0d441683-ddfb-4ea1-8a5f-c5870321b935","Type":"ContainerDied","Data":"661d56cd2cd01e9e152d042adb30320eca8a46699c22e36784486ae6f8a3a23d"} Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.752913 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6s84h" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.752916 4810 scope.go:117] "RemoveContainer" containerID="13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.782838 4810 scope.go:117] "RemoveContainer" containerID="567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.790005 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6s84h"] Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.806241 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6s84h"] Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.807420 4810 scope.go:117] "RemoveContainer" containerID="b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.845395 4810 scope.go:117] "RemoveContainer" containerID="13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4" Dec 03 06:56:36 crc kubenswrapper[4810]: E1203 06:56:36.845818 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4\": container with ID starting with 13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4 not found: ID does not exist" containerID="13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.845852 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4"} err="failed to get container status \"13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4\": rpc error: code = NotFound desc = could not find container \"13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4\": container with ID starting with 13b331e661e46ef8d4cd23f417290eca2257767103b16e3ba402941bb2dffec4 not found: ID does not exist" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.845875 4810 scope.go:117] "RemoveContainer" containerID="567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17" Dec 03 06:56:36 crc kubenswrapper[4810]: E1203 06:56:36.846137 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17\": container with ID starting with 567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17 not found: ID does not exist" containerID="567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.846160 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17"} err="failed to get container status \"567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17\": rpc error: code = NotFound desc = could not find container \"567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17\": container with ID starting with 567aa9be18e5ad10b55769813c662866aaf0724fc00deeb073bf799b66a50a17 not found: ID does not exist" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.846176 4810 scope.go:117] "RemoveContainer" 
containerID="b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193" Dec 03 06:56:36 crc kubenswrapper[4810]: E1203 06:56:36.846392 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193\": container with ID starting with b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193 not found: ID does not exist" containerID="b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193" Dec 03 06:56:36 crc kubenswrapper[4810]: I1203 06:56:36.846410 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193"} err="failed to get container status \"b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193\": rpc error: code = NotFound desc = could not find container \"b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193\": container with ID starting with b8b0e627af71019a3bc1ce2379106db47df898aaa57b521ac640d064dd021193 not found: ID does not exist" Dec 03 06:56:38 crc kubenswrapper[4810]: I1203 06:56:38.399840 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" path="/var/lib/kubelet/pods/0d441683-ddfb-4ea1-8a5f-c5870321b935/volumes" Dec 03 06:56:46 crc kubenswrapper[4810]: I1203 06:56:46.386685 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:56:46 crc kubenswrapper[4810]: E1203 06:56:46.387941 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:57:01 crc kubenswrapper[4810]: I1203 06:57:01.379996 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:57:01 crc kubenswrapper[4810]: E1203 06:57:01.380623 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.170694 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pxzlx"] Dec 03 06:57:11 crc kubenswrapper[4810]: E1203 06:57:11.172023 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerName="registry-server" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.172055 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerName="registry-server" Dec 03 06:57:11 crc kubenswrapper[4810]: E1203 06:57:11.172114 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerName="extract-content" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 
06:57:11.172132 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerName="extract-content" Dec 03 06:57:11 crc kubenswrapper[4810]: E1203 06:57:11.172199 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerName="extract-utilities" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.172213 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerName="extract-utilities" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.172565 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d441683-ddfb-4ea1-8a5f-c5870321b935" containerName="registry-server" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.175008 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.196926 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pxzlx"] Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.295339 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-catalog-content\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.295501 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwmqm\" (UniqueName: \"kubernetes.io/projected/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-kube-api-access-xwmqm\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.295531 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-utilities\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.397176 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwmqm\" (UniqueName: \"kubernetes.io/projected/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-kube-api-access-xwmqm\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.397257 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-utilities\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.397321 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-catalog-content\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: 
I1203 06:57:11.398034 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-utilities\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.398074 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-catalog-content\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.424807 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwmqm\" (UniqueName: \"kubernetes.io/projected/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-kube-api-access-xwmqm\") pod \"redhat-operators-pxzlx\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:11 crc kubenswrapper[4810]: I1203 06:57:11.498036 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:12 crc kubenswrapper[4810]: I1203 06:57:12.100090 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pxzlx"] Dec 03 06:57:13 crc kubenswrapper[4810]: I1203 06:57:13.086760 4810 generic.go:334] "Generic (PLEG): container finished" podID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerID="aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db" exitCode=0 Dec 03 06:57:13 crc kubenswrapper[4810]: I1203 06:57:13.086846 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxzlx" event={"ID":"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85","Type":"ContainerDied","Data":"aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db"} Dec 03 06:57:13 crc kubenswrapper[4810]: I1203 06:57:13.087169 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxzlx" event={"ID":"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85","Type":"ContainerStarted","Data":"44ff4b1c2db4858e210c850befba766e36ff26a7e8a66a009c4be5ab90486396"} Dec 03 06:57:14 crc kubenswrapper[4810]: I1203 06:57:14.097622 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxzlx" event={"ID":"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85","Type":"ContainerStarted","Data":"6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12"} Dec 03 06:57:15 crc kubenswrapper[4810]: I1203 06:57:15.107474 4810 generic.go:334] "Generic (PLEG): container finished" podID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerID="6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12" exitCode=0 Dec 03 06:57:15 crc kubenswrapper[4810]: I1203 06:57:15.107522 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxzlx" event={"ID":"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85","Type":"ContainerDied","Data":"6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12"} Dec 03 06:57:15 crc kubenswrapper[4810]: I1203 06:57:15.379389 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:57:15 crc kubenswrapper[4810]: E1203 06:57:15.379999 4810 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:57:16 crc kubenswrapper[4810]: I1203 06:57:16.117270 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxzlx" event={"ID":"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85","Type":"ContainerStarted","Data":"6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d"} Dec 03 06:57:16 crc kubenswrapper[4810]: I1203 06:57:16.149368 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pxzlx" podStartSLOduration=2.746058427 podStartE2EDuration="5.149342401s" podCreationTimestamp="2025-12-03 06:57:11 +0000 UTC" firstStartedPulling="2025-12-03 06:57:13.089419388 +0000 UTC m=+4557.024880269" lastFinishedPulling="2025-12-03 06:57:15.492703402 +0000 UTC m=+4559.428164243" observedRunningTime="2025-12-03 06:57:16.140407355 +0000 UTC m=+4560.075868206" watchObservedRunningTime="2025-12-03 06:57:16.149342401 +0000 UTC m=+4560.084803242" Dec 03 06:57:21 crc kubenswrapper[4810]: I1203 06:57:21.499459 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:21 crc kubenswrapper[4810]: I1203 06:57:21.500102 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:21 crc kubenswrapper[4810]: I1203 06:57:21.548951 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:22 crc kubenswrapper[4810]: I1203 06:57:22.219710 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:22 crc kubenswrapper[4810]: I1203 06:57:22.279921 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pxzlx"] Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.196553 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pxzlx" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerName="registry-server" containerID="cri-o://6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d" gracePeriod=2 Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.698502 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.806952 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-catalog-content\") pod \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.807055 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwmqm\" (UniqueName: \"kubernetes.io/projected/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-kube-api-access-xwmqm\") pod \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.807295 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-utilities\") pod \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\" (UID: \"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85\") " Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.808500 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-utilities" (OuterVolumeSpecName: "utilities") pod "95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" (UID: "95fba8b0-a0a6-49b8-8066-bb80c1d8bb85"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.809123 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.812954 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-kube-api-access-xwmqm" (OuterVolumeSpecName: "kube-api-access-xwmqm") pod "95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" (UID: "95fba8b0-a0a6-49b8-8066-bb80c1d8bb85"). InnerVolumeSpecName "kube-api-access-xwmqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:57:24 crc kubenswrapper[4810]: I1203 06:57:24.911861 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwmqm\" (UniqueName: \"kubernetes.io/projected/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-kube-api-access-xwmqm\") on node \"crc\" DevicePath \"\"" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.208390 4810 generic.go:334] "Generic (PLEG): container finished" podID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerID="6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d" exitCode=0 Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.208444 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pxzlx" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.208493 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxzlx" event={"ID":"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85","Type":"ContainerDied","Data":"6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d"} Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.208951 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxzlx" event={"ID":"95fba8b0-a0a6-49b8-8066-bb80c1d8bb85","Type":"ContainerDied","Data":"44ff4b1c2db4858e210c850befba766e36ff26a7e8a66a009c4be5ab90486396"} Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.209003 4810 scope.go:117] "RemoveContainer" containerID="6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.241520 4810 scope.go:117] "RemoveContainer" containerID="6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.279660 4810 scope.go:117] "RemoveContainer" containerID="aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.781634 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" (UID: "95fba8b0-a0a6-49b8-8066-bb80c1d8bb85"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.828079 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.851518 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pxzlx"] Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.867303 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pxzlx"] Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.997847 4810 scope.go:117] "RemoveContainer" containerID="6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d" Dec 03 06:57:25 crc kubenswrapper[4810]: E1203 06:57:25.998388 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d\": container with ID starting with 6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d not found: ID does not exist" containerID="6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.998415 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d"} err="failed to get container status \"6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d\": rpc error: code = NotFound desc = could not find container \"6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d\": container with ID starting with 6debe7c07886e82d3d9af3bc189edad3407538a61a63cadc5e48c1c115c4ae2d not found: ID does not exist" Dec 03 
06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.998437 4810 scope.go:117] "RemoveContainer" containerID="6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12" Dec 03 06:57:25 crc kubenswrapper[4810]: E1203 06:57:25.998727 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12\": container with ID starting with 6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12 not found: ID does not exist" containerID="6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.998763 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12"} err="failed to get container status \"6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12\": rpc error: code = NotFound desc = could not find container \"6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12\": container with ID starting with 6993dc7f5d8abe12aa1094b0d5bb80bdcd39b42e2f96fc02dadb7c46453d1c12 not found: ID does not exist" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.998777 4810 scope.go:117] "RemoveContainer" containerID="aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db" Dec 03 06:57:25 crc kubenswrapper[4810]: E1203 06:57:25.999030 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db\": container with ID starting with aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db not found: ID does not exist" containerID="aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db" Dec 03 06:57:25 crc kubenswrapper[4810]: I1203 06:57:25.999073 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db"} err="failed to get container status \"aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db\": rpc error: code = NotFound desc = could not find container \"aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db\": container with ID starting with aa995255daf50d9aa20e770625c6afb164c65eb079ec99794c78b587847410db not found: ID does not exist" Dec 03 06:57:26 crc kubenswrapper[4810]: I1203 06:57:26.401051 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" path="/var/lib/kubelet/pods/95fba8b0-a0a6-49b8-8066-bb80c1d8bb85/volumes" Dec 03 06:57:27 crc kubenswrapper[4810]: I1203 06:57:27.377403 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:57:27 crc kubenswrapper[4810]: E1203 06:57:27.377796 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:57:30 crc kubenswrapper[4810]: I1203 06:57:30.801776 4810 scope.go:117] "RemoveContainer" 
containerID="19ab936503a509b3653cf0dadae58137e89827ca778bae8f03f4d1b7306cee60" Dec 03 06:57:42 crc kubenswrapper[4810]: I1203 06:57:42.385479 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:57:42 crc kubenswrapper[4810]: E1203 06:57:42.386271 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:57:53 crc kubenswrapper[4810]: I1203 06:57:53.379713 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:57:53 crc kubenswrapper[4810]: E1203 06:57:53.380925 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:58:06 crc kubenswrapper[4810]: I1203 06:58:06.383577 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:58:06 crc kubenswrapper[4810]: E1203 06:58:06.384424 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:58:21 crc kubenswrapper[4810]: I1203 06:58:21.378034 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:58:21 crc kubenswrapper[4810]: E1203 06:58:21.379217 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:58:36 crc kubenswrapper[4810]: I1203 06:58:36.386467 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:58:36 crc kubenswrapper[4810]: E1203 06:58:36.387358 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.706622 4810 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-must-gather-wgrf4/must-gather-xxkv2"] Dec 03 06:58:38 crc kubenswrapper[4810]: E1203 06:58:38.707716 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerName="extract-utilities" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.707803 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerName="extract-utilities" Dec 03 06:58:38 crc kubenswrapper[4810]: E1203 06:58:38.707857 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerName="extract-content" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.707904 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerName="extract-content" Dec 03 06:58:38 crc kubenswrapper[4810]: E1203 06:58:38.707966 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerName="registry-server" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.708012 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerName="registry-server" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.708220 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="95fba8b0-a0a6-49b8-8066-bb80c1d8bb85" containerName="registry-server" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.710139 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.717505 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wgrf4"/"openshift-service-ca.crt" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.717598 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wgrf4"/"kube-root-ca.crt" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.717795 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wgrf4"/"default-dockercfg-tv8jm" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.731055 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wgrf4/must-gather-xxkv2"] Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.808232 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-must-gather-output\") pod \"must-gather-xxkv2\" (UID: \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\") " pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.808304 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmtjd\" (UniqueName: \"kubernetes.io/projected/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-kube-api-access-jmtjd\") pod \"must-gather-xxkv2\" (UID: \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\") " pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.909908 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmtjd\" (UniqueName: \"kubernetes.io/projected/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-kube-api-access-jmtjd\") pod \"must-gather-xxkv2\" (UID: \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\") " 
pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.910308 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-must-gather-output\") pod \"must-gather-xxkv2\" (UID: \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\") " pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 06:58:38 crc kubenswrapper[4810]: I1203 06:58:38.910976 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-must-gather-output\") pod \"must-gather-xxkv2\" (UID: \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\") " pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 06:58:39 crc kubenswrapper[4810]: I1203 06:58:39.283543 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmtjd\" (UniqueName: \"kubernetes.io/projected/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-kube-api-access-jmtjd\") pod \"must-gather-xxkv2\" (UID: \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\") " pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 06:58:39 crc kubenswrapper[4810]: I1203 06:58:39.329916 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 06:58:39 crc kubenswrapper[4810]: I1203 06:58:39.867284 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wgrf4/must-gather-xxkv2"] Dec 03 06:58:40 crc kubenswrapper[4810]: I1203 06:58:40.055678 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" event={"ID":"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc","Type":"ContainerStarted","Data":"a298c8c3520715b01662216771306caa1c5dbf41917f8377d07cee3893627f6a"} Dec 03 06:58:41 crc kubenswrapper[4810]: I1203 06:58:41.067896 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" event={"ID":"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc","Type":"ContainerStarted","Data":"9cb2fd8476445d1caff2de8b1c57140b57e0d0670b8669555892f93a8048159f"} Dec 03 06:58:41 crc kubenswrapper[4810]: I1203 06:58:41.068404 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" event={"ID":"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc","Type":"ContainerStarted","Data":"fc50315b6f65b6d4d67f7816df90478bb1d57e7c3a3906f957ce28df770531ca"} Dec 03 06:58:41 crc kubenswrapper[4810]: I1203 06:58:41.090659 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" podStartSLOduration=3.090642034 podStartE2EDuration="3.090642034s" podCreationTimestamp="2025-12-03 06:58:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:58:41.086989838 +0000 UTC m=+4645.022450679" watchObservedRunningTime="2025-12-03 06:58:41.090642034 +0000 UTC m=+4645.026102875" Dec 03 06:58:43 crc kubenswrapper[4810]: I1203 06:58:43.768816 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wgrf4/crc-debug-xs7gd"] Dec 03 06:58:43 crc kubenswrapper[4810]: I1203 06:58:43.770443 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:43 crc kubenswrapper[4810]: I1203 06:58:43.810763 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6knlj\" (UniqueName: \"kubernetes.io/projected/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-kube-api-access-6knlj\") pod \"crc-debug-xs7gd\" (UID: \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\") " pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:43 crc kubenswrapper[4810]: I1203 06:58:43.810851 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-host\") pod \"crc-debug-xs7gd\" (UID: \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\") " pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:43 crc kubenswrapper[4810]: I1203 06:58:43.912864 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6knlj\" (UniqueName: \"kubernetes.io/projected/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-kube-api-access-6knlj\") pod \"crc-debug-xs7gd\" (UID: \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\") " pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:43 crc kubenswrapper[4810]: I1203 06:58:43.912942 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-host\") pod \"crc-debug-xs7gd\" (UID: \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\") " pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:43 crc kubenswrapper[4810]: I1203 06:58:43.913169 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-host\") pod \"crc-debug-xs7gd\" (UID: \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\") " pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:43 crc kubenswrapper[4810]: I1203 06:58:43.937559 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6knlj\" (UniqueName: \"kubernetes.io/projected/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-kube-api-access-6knlj\") pod \"crc-debug-xs7gd\" (UID: \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\") " pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:44 crc kubenswrapper[4810]: I1203 06:58:44.086185 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:45 crc kubenswrapper[4810]: I1203 06:58:45.121723 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" event={"ID":"6d6dfa07-1166-4e2f-a815-4731b2da9fa4","Type":"ContainerStarted","Data":"24b3aada9f80e63b096a0c81b308b270f15404bbcd3d7e2044c090e3d52f8e12"} Dec 03 06:58:45 crc kubenswrapper[4810]: I1203 06:58:45.121992 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" event={"ID":"6d6dfa07-1166-4e2f-a815-4731b2da9fa4","Type":"ContainerStarted","Data":"60ed24e43d80b547917414e94aa88d02836bfa305109fd155d33122d4b34adb3"} Dec 03 06:58:45 crc kubenswrapper[4810]: I1203 06:58:45.138807 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" podStartSLOduration=2.138790671 podStartE2EDuration="2.138790671s" podCreationTimestamp="2025-12-03 06:58:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 06:58:45.135958186 +0000 UTC m=+4649.071419027" watchObservedRunningTime="2025-12-03 06:58:45.138790671 +0000 UTC m=+4649.074251512" Dec 03 06:58:49 crc kubenswrapper[4810]: I1203 06:58:49.377418 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:58:49 crc kubenswrapper[4810]: E1203 06:58:49.378154 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:58:56 crc kubenswrapper[4810]: I1203 06:58:56.212225 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" event={"ID":"6d6dfa07-1166-4e2f-a815-4731b2da9fa4","Type":"ContainerDied","Data":"24b3aada9f80e63b096a0c81b308b270f15404bbcd3d7e2044c090e3d52f8e12"} Dec 03 06:58:56 crc kubenswrapper[4810]: I1203 06:58:56.212174 4810 generic.go:334] "Generic (PLEG): container finished" podID="6d6dfa07-1166-4e2f-a815-4731b2da9fa4" containerID="24b3aada9f80e63b096a0c81b308b270f15404bbcd3d7e2044c090e3d52f8e12" exitCode=0 Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.319928 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.355369 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wgrf4/crc-debug-xs7gd"] Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.366098 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wgrf4/crc-debug-xs7gd"] Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.461850 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-host\") pod \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\" (UID: \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\") " Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.461983 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-host" (OuterVolumeSpecName: "host") pod "6d6dfa07-1166-4e2f-a815-4731b2da9fa4" (UID: "6d6dfa07-1166-4e2f-a815-4731b2da9fa4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.462009 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6knlj\" (UniqueName: \"kubernetes.io/projected/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-kube-api-access-6knlj\") pod \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\" (UID: \"6d6dfa07-1166-4e2f-a815-4731b2da9fa4\") " Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.462514 4810 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-host\") on node \"crc\" DevicePath \"\"" Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.473400 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-kube-api-access-6knlj" (OuterVolumeSpecName: "kube-api-access-6knlj") pod "6d6dfa07-1166-4e2f-a815-4731b2da9fa4" (UID: "6d6dfa07-1166-4e2f-a815-4731b2da9fa4"). InnerVolumeSpecName "kube-api-access-6knlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:58:57 crc kubenswrapper[4810]: I1203 06:58:57.563770 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6knlj\" (UniqueName: \"kubernetes.io/projected/6d6dfa07-1166-4e2f-a815-4731b2da9fa4-kube-api-access-6knlj\") on node \"crc\" DevicePath \"\"" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.232606 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60ed24e43d80b547917414e94aa88d02836bfa305109fd155d33122d4b34adb3" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.232663 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wgrf4/crc-debug-xs7gd" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.389425 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d6dfa07-1166-4e2f-a815-4731b2da9fa4" path="/var/lib/kubelet/pods/6d6dfa07-1166-4e2f-a815-4731b2da9fa4/volumes" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.592688 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wgrf4/crc-debug-n7dss"] Dec 03 06:58:58 crc kubenswrapper[4810]: E1203 06:58:58.593142 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d6dfa07-1166-4e2f-a815-4731b2da9fa4" containerName="container-00" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.593165 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d6dfa07-1166-4e2f-a815-4731b2da9fa4" containerName="container-00" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.593400 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d6dfa07-1166-4e2f-a815-4731b2da9fa4" containerName="container-00" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.594162 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.683354 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0d1c435-4566-42dc-930c-8d8d9b748ec0-host\") pod \"crc-debug-n7dss\" (UID: \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\") " pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.683572 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cffz9\" (UniqueName: \"kubernetes.io/projected/d0d1c435-4566-42dc-930c-8d8d9b748ec0-kube-api-access-cffz9\") pod \"crc-debug-n7dss\" (UID: \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\") " pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.784814 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cffz9\" (UniqueName: \"kubernetes.io/projected/d0d1c435-4566-42dc-930c-8d8d9b748ec0-kube-api-access-cffz9\") pod \"crc-debug-n7dss\" (UID: \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\") " pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.784924 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0d1c435-4566-42dc-930c-8d8d9b748ec0-host\") pod \"crc-debug-n7dss\" (UID: \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\") " pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.784994 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0d1c435-4566-42dc-930c-8d8d9b748ec0-host\") pod \"crc-debug-n7dss\" (UID: \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\") " pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.804599 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cffz9\" (UniqueName: \"kubernetes.io/projected/d0d1c435-4566-42dc-930c-8d8d9b748ec0-kube-api-access-cffz9\") pod \"crc-debug-n7dss\" (UID: \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\") " 
pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:58:58 crc kubenswrapper[4810]: I1203 06:58:58.909499 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:58:59 crc kubenswrapper[4810]: I1203 06:58:59.241387 4810 generic.go:334] "Generic (PLEG): container finished" podID="d0d1c435-4566-42dc-930c-8d8d9b748ec0" containerID="479787f8b8f85310d9c954366962cf07396e73b55d33c2c5e783cdd0488f0046" exitCode=1 Dec 03 06:58:59 crc kubenswrapper[4810]: I1203 06:58:59.241425 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/crc-debug-n7dss" event={"ID":"d0d1c435-4566-42dc-930c-8d8d9b748ec0","Type":"ContainerDied","Data":"479787f8b8f85310d9c954366962cf07396e73b55d33c2c5e783cdd0488f0046"} Dec 03 06:58:59 crc kubenswrapper[4810]: I1203 06:58:59.241448 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/crc-debug-n7dss" event={"ID":"d0d1c435-4566-42dc-930c-8d8d9b748ec0","Type":"ContainerStarted","Data":"ebd417579bbd1a66f076efc59c60721f451eb2d57db20698037f31edb4c4cb2a"} Dec 03 06:58:59 crc kubenswrapper[4810]: I1203 06:58:59.291067 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wgrf4/crc-debug-n7dss"] Dec 03 06:58:59 crc kubenswrapper[4810]: I1203 06:58:59.299954 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wgrf4/crc-debug-n7dss"] Dec 03 06:59:00 crc kubenswrapper[4810]: I1203 06:59:00.350273 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:59:00 crc kubenswrapper[4810]: I1203 06:59:00.517619 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cffz9\" (UniqueName: \"kubernetes.io/projected/d0d1c435-4566-42dc-930c-8d8d9b748ec0-kube-api-access-cffz9\") pod \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\" (UID: \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\") " Dec 03 06:59:00 crc kubenswrapper[4810]: I1203 06:59:00.517672 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0d1c435-4566-42dc-930c-8d8d9b748ec0-host\") pod \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\" (UID: \"d0d1c435-4566-42dc-930c-8d8d9b748ec0\") " Dec 03 06:59:00 crc kubenswrapper[4810]: I1203 06:59:00.517825 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0d1c435-4566-42dc-930c-8d8d9b748ec0-host" (OuterVolumeSpecName: "host") pod "d0d1c435-4566-42dc-930c-8d8d9b748ec0" (UID: "d0d1c435-4566-42dc-930c-8d8d9b748ec0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 06:59:00 crc kubenswrapper[4810]: I1203 06:59:00.518719 4810 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0d1c435-4566-42dc-930c-8d8d9b748ec0-host\") on node \"crc\" DevicePath \"\"" Dec 03 06:59:00 crc kubenswrapper[4810]: I1203 06:59:00.522961 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0d1c435-4566-42dc-930c-8d8d9b748ec0-kube-api-access-cffz9" (OuterVolumeSpecName: "kube-api-access-cffz9") pod "d0d1c435-4566-42dc-930c-8d8d9b748ec0" (UID: "d0d1c435-4566-42dc-930c-8d8d9b748ec0"). InnerVolumeSpecName "kube-api-access-cffz9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 06:59:00 crc kubenswrapper[4810]: I1203 06:59:00.621025 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cffz9\" (UniqueName: \"kubernetes.io/projected/d0d1c435-4566-42dc-930c-8d8d9b748ec0-kube-api-access-cffz9\") on node \"crc\" DevicePath \"\"" Dec 03 06:59:01 crc kubenswrapper[4810]: I1203 06:59:01.264086 4810 scope.go:117] "RemoveContainer" containerID="479787f8b8f85310d9c954366962cf07396e73b55d33c2c5e783cdd0488f0046" Dec 03 06:59:01 crc kubenswrapper[4810]: I1203 06:59:01.264292 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wgrf4/crc-debug-n7dss" Dec 03 06:59:02 crc kubenswrapper[4810]: I1203 06:59:02.389834 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0d1c435-4566-42dc-930c-8d8d9b748ec0" path="/var/lib/kubelet/pods/d0d1c435-4566-42dc-930c-8d8d9b748ec0/volumes" Dec 03 06:59:04 crc kubenswrapper[4810]: I1203 06:59:04.377913 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:59:04 crc kubenswrapper[4810]: E1203 06:59:04.378468 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:59:16 crc kubenswrapper[4810]: I1203 06:59:16.391837 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:59:16 crc kubenswrapper[4810]: E1203 06:59:16.393164 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:59:28 crc kubenswrapper[4810]: I1203 06:59:28.378057 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:59:28 crc kubenswrapper[4810]: E1203 06:59:28.378879 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:59:42 crc kubenswrapper[4810]: I1203 06:59:42.378430 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:59:42 crc kubenswrapper[4810]: E1203 06:59:42.379088 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:59:49 crc kubenswrapper[4810]: I1203 06:59:49.719041 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c9578688-qf8tw_3554de12-10d1-48a4-a17e-d5ce9955fa9c/barbican-api/0.log" Dec 03 06:59:49 crc kubenswrapper[4810]: I1203 06:59:49.887512 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c9578688-qf8tw_3554de12-10d1-48a4-a17e-d5ce9955fa9c/barbican-api-log/0.log" Dec 03 06:59:49 crc kubenswrapper[4810]: I1203 06:59:49.971152 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-75bb559794-4gn48_3260c501-1348-49f4-8182-437086a5649e/barbican-keystone-listener/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.036352 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-75bb559794-4gn48_3260c501-1348-49f4-8182-437086a5649e/barbican-keystone-listener-log/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.160992 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-856ff6b4bf-thl85_9d889657-930a-4878-8727-91b0ab50723c/barbican-worker/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.205933 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-856ff6b4bf-thl85_9d889657-930a-4878-8727-91b0ab50723c/barbican-worker-log/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.397217 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-f489j_4dc1bd47-9cbd-4849-b466-bf72ec92cf14/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.458989 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_8e439d29-4442-41b5-94ba-a8fb7f77d5f0/ceilometer-central-agent/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.536947 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_8e439d29-4442-41b5-94ba-a8fb7f77d5f0/proxy-httpd/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.548207 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_8e439d29-4442-41b5-94ba-a8fb7f77d5f0/ceilometer-notification-agent/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.584212 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_8e439d29-4442-41b5-94ba-a8fb7f77d5f0/sg-core/0.log" Dec 03 06:59:50 crc kubenswrapper[4810]: I1203 06:59:50.769781 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca/cinder-api-log/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.055845 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_d2130a28-40ad-4938-a265-8114fbcf38a1/cinder-scheduler/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.084462 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_d2130a28-40ad-4938-a265-8114fbcf38a1/probe/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.107036 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_3fc6e5ae-bee7-43fe-b8ff-d82f7f3664ca/cinder-api/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 
06:59:51.214701 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jdl7k_4e5495ed-5a81-4b95-bb3e-9f1ce58ce07f/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.297645 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-x76jl_b303053e-72d8-44d5-8766-d83b7fcba87a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.411297 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-797f4ccc47-h492p_c2e2cdc0-2bb3-450f-b42d-8bfeee479f46/init/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.625967 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-797f4ccc47-h492p_c2e2cdc0-2bb3-450f-b42d-8bfeee479f46/init/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.667266 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-797f4ccc47-h492p_c2e2cdc0-2bb3-450f-b42d-8bfeee479f46/dnsmasq-dns/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.694289 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-m2mpf_15aa89be-fc4f-4965-99f0-3eb7bce02b10/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.864980 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2f6dafcd-7f4b-47a9-b5ae-be22f6c84491/glance-log/0.log" Dec 03 06:59:51 crc kubenswrapper[4810]: I1203 06:59:51.873434 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2f6dafcd-7f4b-47a9-b5ae-be22f6c84491/glance-httpd/0.log" Dec 03 06:59:52 crc kubenswrapper[4810]: I1203 06:59:52.022802 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6b4868fe-4bdb-492d-bbb1-94d2793b41eb/glance-httpd/0.log" Dec 03 06:59:52 crc kubenswrapper[4810]: I1203 06:59:52.044470 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6b4868fe-4bdb-492d-bbb1-94d2793b41eb/glance-log/0.log" Dec 03 06:59:52 crc kubenswrapper[4810]: I1203 06:59:52.094494 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-lkhfp_ee03c5ac-6b76-4852-b07f-b73140f037dd/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:52 crc kubenswrapper[4810]: I1203 06:59:52.282805 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-6kpml_04e18cd1-868a-4d9c-882a-c1af0ef1f4dc/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:52 crc kubenswrapper[4810]: I1203 06:59:52.502850 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29412361-hn96l_3a8164cf-937e-4f52-a03e-00708ad12ebb/keystone-cron/0.log" Dec 03 06:59:52 crc kubenswrapper[4810]: I1203 06:59:52.699295 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-69cffb6c97-gskt7_16e8ad53-6c83-4176-94d2-e37a0ff234e2/keystone-api/0.log" Dec 03 06:59:52 crc kubenswrapper[4810]: I1203 06:59:52.719229 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_kube-state-metrics-0_8d045016-8932-4293-9f53-71663d354934/kube-state-metrics/0.log" Dec 03 06:59:52 crc kubenswrapper[4810]: I1203 06:59:52.894157 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-9lrzt_d19e7058-371b-4ac9-811a-949bc24e8b03/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:53 crc kubenswrapper[4810]: I1203 06:59:53.134647 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c4f7dfb9-9f84-4331-ad73-4b4ffa8120b4/memcached/0.log" Dec 03 06:59:53 crc kubenswrapper[4810]: I1203 06:59:53.210168 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-74c9fd966f-8fc7k_411b48fc-bafd-47a2-8bd6-c31e2132b09f/neutron-api/0.log" Dec 03 06:59:53 crc kubenswrapper[4810]: I1203 06:59:53.225207 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-74c9fd966f-8fc7k_411b48fc-bafd-47a2-8bd6-c31e2132b09f/neutron-httpd/0.log" Dec 03 06:59:53 crc kubenswrapper[4810]: I1203 06:59:53.318116 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-nzwfl_201a4b8d-5ecb-4cc4-bacb-51d499efb485/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:53 crc kubenswrapper[4810]: I1203 06:59:53.923924 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_51f2670f-1de2-4383-a6a9-ea85a63a7586/nova-cell0-conductor-conductor/0.log" Dec 03 06:59:53 crc kubenswrapper[4810]: I1203 06:59:53.977242 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c/nova-api-log/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.131344 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_70eeea83-764d-4b0f-be6a-74c31a35c455/nova-cell1-conductor-conductor/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.282212 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_35ba7fd0-41b1-4669-8cb7-5538b4ef5492/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.336188 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-fmtpt_52125ad1-c593-45bd-b8d0-9a46aa72f614/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.373296 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_ae02eee1-9f2a-48c3-8b5e-79e4e6b5bc3c/nova-api-api/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.457054 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_75e0323d-ca57-4a7e-a883-35da97b7e9d7/nova-metadata-log/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.722146 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8d3f680-ee84-4492-8d18-278d088e1332/mysql-bootstrap/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.771223 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_788c2be7-daf0-4cb1-9d7e-0f351e348603/nova-scheduler-scheduler/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.959212 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_d8d3f680-ee84-4492-8d18-278d088e1332/mysql-bootstrap/0.log" Dec 03 06:59:54 crc kubenswrapper[4810]: I1203 06:59:54.987062 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b0b5ee63-b0f3-4133-a294-69ed680c5374/mysql-bootstrap/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.002810 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8d3f680-ee84-4492-8d18-278d088e1332/galera/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.177449 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_db524b8c-98e9-41bf-be3f-5376226012e4/openstackclient/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.184060 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b0b5ee63-b0f3-4133-a294-69ed680c5374/mysql-bootstrap/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.190663 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b0b5ee63-b0f3-4133-a294-69ed680c5374/galera/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.376960 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 06:59:55 crc kubenswrapper[4810]: E1203 06:59:55.377208 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.447633 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4x8tn_0df96f16-d193-4ecc-a624-e721c61a42af/ovn-controller/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.468097 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-hthr4_366d9c09-ff45-486b-957f-abeba4ccfda0/openstack-network-exporter/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.602998 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qd85b_fe7b8456-b2a9-44b7-b00b-320854a4c571/ovsdb-server-init/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.613194 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_75e0323d-ca57-4a7e-a883-35da97b7e9d7/nova-metadata-metadata/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.814553 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qd85b_fe7b8456-b2a9-44b7-b00b-320854a4c571/ovs-vswitchd/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.820289 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qd85b_fe7b8456-b2a9-44b7-b00b-320854a4c571/ovsdb-server-init/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.827367 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qd85b_fe7b8456-b2a9-44b7-b00b-320854a4c571/ovsdb-server/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.885896 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-wz77v_ba7b0eac-f456-4a3d-b96a-b44fc348d317/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:55 crc kubenswrapper[4810]: I1203 06:59:55.990327 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3f9e5557-2fe8-4d2f-a663-3f015aa61b9e/openstack-network-exporter/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.021699 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3f9e5557-2fe8-4d2f-a663-3f015aa61b9e/ovn-northd/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.073006 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_907b4534-7daf-4a4f-ae5b-65d58194cabf/openstack-network-exporter/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.145178 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_907b4534-7daf-4a4f-ae5b-65d58194cabf/ovsdbserver-nb/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.261276 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b03355df-e435-4db6-8f0a-10a6618f4bfa/openstack-network-exporter/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.286804 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b03355df-e435-4db6-8f0a-10a6618f4bfa/ovsdbserver-sb/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.426550 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67f9f779cb-gh9cv_770e7dec-064e-4641-a94b-78121261d7cd/placement-api/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.546354 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b4404434-bf13-4da3-a7df-d5ef032b4b67/setup-container/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.553208 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67f9f779cb-gh9cv_770e7dec-064e-4641-a94b-78121261d7cd/placement-log/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.661333 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b4404434-bf13-4da3-a7df-d5ef032b4b67/rabbitmq/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.662676 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b4404434-bf13-4da3-a7df-d5ef032b4b67/setup-container/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.759910 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_743ad4f7-d246-495e-8f32-4ecf10c858bd/setup-container/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.928436 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_743ad4f7-d246-495e-8f32-4ecf10c858bd/rabbitmq/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.938524 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_743ad4f7-d246-495e-8f32-4ecf10c858bd/setup-container/0.log" Dec 03 06:59:56 crc kubenswrapper[4810]: I1203 06:59:56.960524 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-h6v6c_841fc3ce-d8a7-4cb2-89ab-31cae73ce18d/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.142053 4810 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-45tnh_f7a1032e-664c-477e-93be-b363dce922bb/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.176713 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-s8mt2_64d3f7de-ee70-4197-b7ba-547459e0dfef/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.216332 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-c66cp_a2097290-7aae-478a-9bd0-7d8c5a32b4d1/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.306098 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-hnbdl_0200a8b5-f03d-494e-9741-987a521ea388/ssh-known-hosts-edpm-deployment/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.480136 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-b455b97b9-skz54_61ab11f4-c89b-406d-817d-f652951cf71d/proxy-server/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.505885 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-b455b97b9-skz54_61ab11f4-c89b-406d-817d-f652951cf71d/proxy-httpd/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.577244 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-7jjx6_d10f8a77-de87-4373-862d-1c5c27744e5a/swift-ring-rebalance/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.679919 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/account-auditor/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.733028 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/account-reaper/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.806820 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/account-replicator/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.820352 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/container-auditor/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.838780 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/account-server/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.903317 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/container-replicator/0.log" Dec 03 06:59:57 crc kubenswrapper[4810]: I1203 06:59:57.957168 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/container-server/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.002178 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/container-updater/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.016918 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-expirer/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.029690 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-auditor/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.653496 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-updater/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.692226 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/swift-recon-cron/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.692265 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/rsync/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.716631 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-replicator/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.731467 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_229a32b8-4f61-4370-afc5-a5d2ddaf1dc8/object-server/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.913562 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-9zthf_91e736a7-e1a5-4b7c-9638-71c18367e234/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 06:59:58 crc kubenswrapper[4810]: I1203 06:59:58.938059 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_369e9073-4fa1-48fc-a1ba-d9ec0aa0d74f/tempest-tests-tempest-tests-runner/0.log" Dec 03 06:59:59 crc kubenswrapper[4810]: I1203 06:59:59.057142 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_cae965d3-f73d-492c-8f57-f5a9e57c1d53/test-operator-logs-container/0.log" Dec 03 06:59:59 crc kubenswrapper[4810]: I1203 06:59:59.087437 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-vx7d6_5f7c21ae-9d4b-4783-97da-66a73e29790a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.209442 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw"] Dec 03 07:00:00 crc kubenswrapper[4810]: E1203 07:00:00.210094 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d1c435-4566-42dc-930c-8d8d9b748ec0" containerName="container-00" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.210122 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d1c435-4566-42dc-930c-8d8d9b748ec0" containerName="container-00" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.210444 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d1c435-4566-42dc-930c-8d8d9b748ec0" containerName="container-00" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.211537 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.215093 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.215723 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.224296 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw"] Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.360266 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8aabf11d-12b3-4d52-bbf2-98ea6886227c-secret-volume\") pod \"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.360997 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8aabf11d-12b3-4d52-bbf2-98ea6886227c-config-volume\") pod \"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.361226 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w66q4\" (UniqueName: \"kubernetes.io/projected/8aabf11d-12b3-4d52-bbf2-98ea6886227c-kube-api-access-w66q4\") pod \"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.462670 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w66q4\" (UniqueName: \"kubernetes.io/projected/8aabf11d-12b3-4d52-bbf2-98ea6886227c-kube-api-access-w66q4\") pod \"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.463073 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8aabf11d-12b3-4d52-bbf2-98ea6886227c-secret-volume\") pod \"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.463281 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8aabf11d-12b3-4d52-bbf2-98ea6886227c-config-volume\") pod \"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.464262 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8aabf11d-12b3-4d52-bbf2-98ea6886227c-config-volume\") pod 
\"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.469714 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8aabf11d-12b3-4d52-bbf2-98ea6886227c-secret-volume\") pod \"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.490428 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w66q4\" (UniqueName: \"kubernetes.io/projected/8aabf11d-12b3-4d52-bbf2-98ea6886227c-kube-api-access-w66q4\") pod \"collect-profiles-29412420-rj4zw\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:00 crc kubenswrapper[4810]: I1203 07:00:00.547940 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:01 crc kubenswrapper[4810]: I1203 07:00:01.040694 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw"] Dec 03 07:00:01 crc kubenswrapper[4810]: I1203 07:00:01.871773 4810 generic.go:334] "Generic (PLEG): container finished" podID="8aabf11d-12b3-4d52-bbf2-98ea6886227c" containerID="0e70d1c80b60fd9ce39bd315a29630e67100006e64bddb27024735625205e7a9" exitCode=0 Dec 03 07:00:01 crc kubenswrapper[4810]: I1203 07:00:01.872027 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" event={"ID":"8aabf11d-12b3-4d52-bbf2-98ea6886227c","Type":"ContainerDied","Data":"0e70d1c80b60fd9ce39bd315a29630e67100006e64bddb27024735625205e7a9"} Dec 03 07:00:01 crc kubenswrapper[4810]: I1203 07:00:01.872077 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" event={"ID":"8aabf11d-12b3-4d52-bbf2-98ea6886227c","Type":"ContainerStarted","Data":"6078f2925d824ff589d3f991c9cb9d46564cca80ae77a2459672c18277febca2"} Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.229036 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.318444 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8aabf11d-12b3-4d52-bbf2-98ea6886227c-secret-volume\") pod \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.318596 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8aabf11d-12b3-4d52-bbf2-98ea6886227c-config-volume\") pod \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.318642 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w66q4\" (UniqueName: \"kubernetes.io/projected/8aabf11d-12b3-4d52-bbf2-98ea6886227c-kube-api-access-w66q4\") pod \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\" (UID: \"8aabf11d-12b3-4d52-bbf2-98ea6886227c\") " Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.319257 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aabf11d-12b3-4d52-bbf2-98ea6886227c-config-volume" (OuterVolumeSpecName: "config-volume") pod "8aabf11d-12b3-4d52-bbf2-98ea6886227c" (UID: "8aabf11d-12b3-4d52-bbf2-98ea6886227c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.323802 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aabf11d-12b3-4d52-bbf2-98ea6886227c-kube-api-access-w66q4" (OuterVolumeSpecName: "kube-api-access-w66q4") pod "8aabf11d-12b3-4d52-bbf2-98ea6886227c" (UID: "8aabf11d-12b3-4d52-bbf2-98ea6886227c"). InnerVolumeSpecName "kube-api-access-w66q4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.330305 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aabf11d-12b3-4d52-bbf2-98ea6886227c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8aabf11d-12b3-4d52-bbf2-98ea6886227c" (UID: "8aabf11d-12b3-4d52-bbf2-98ea6886227c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.420979 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8aabf11d-12b3-4d52-bbf2-98ea6886227c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.421023 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8aabf11d-12b3-4d52-bbf2-98ea6886227c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.421040 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w66q4\" (UniqueName: \"kubernetes.io/projected/8aabf11d-12b3-4d52-bbf2-98ea6886227c-kube-api-access-w66q4\") on node \"crc\" DevicePath \"\"" Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.887308 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" event={"ID":"8aabf11d-12b3-4d52-bbf2-98ea6886227c","Type":"ContainerDied","Data":"6078f2925d824ff589d3f991c9cb9d46564cca80ae77a2459672c18277febca2"} Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.887545 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6078f2925d824ff589d3f991c9cb9d46564cca80ae77a2459672c18277febca2" Dec 03 07:00:03 crc kubenswrapper[4810]: I1203 07:00:03.887384 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412420-rj4zw" Dec 03 07:00:04 crc kubenswrapper[4810]: I1203 07:00:04.303333 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp"] Dec 03 07:00:04 crc kubenswrapper[4810]: I1203 07:00:04.313483 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412375-g9zcp"] Dec 03 07:00:04 crc kubenswrapper[4810]: I1203 07:00:04.386608 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6705d396-c70c-4160-83b5-ded3ceef591e" path="/var/lib/kubelet/pods/6705d396-c70c-4160-83b5-ded3ceef591e/volumes" Dec 03 07:00:10 crc kubenswrapper[4810]: I1203 07:00:10.378297 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 07:00:10 crc kubenswrapper[4810]: E1203 07:00:10.378964 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 07:00:16 crc kubenswrapper[4810]: I1203 07:00:16.867988 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" podUID="fa52e238-d025-4845-85bb-2787a7eb2ed7" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.80:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:00:16 crc kubenswrapper[4810]: I1203 07:00:16.869052 4810 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5lkc2" podUID="fa52e238-d025-4845-85bb-2787a7eb2ed7" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.80:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:00:17 crc kubenswrapper[4810]: I1203 07:00:17.208186 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vs4m6" podUID="ac2ef7b5-018c-4775-8e14-106265e1c300" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.78:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:00:17 crc kubenswrapper[4810]: I1203 07:00:17.221325 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k8whl" podUID="93016dff-dd26-4447-bb03-244d51ba4154" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.58:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:00:17 crc kubenswrapper[4810]: I1203 07:00:17.225435 4810 patch_prober.go:28] interesting pod/openshift-kube-scheduler-crc container/kube-scheduler namespace/openshift-kube-scheduler: Liveness probe status=failure output="Get \"https://192.168.126.11:10259/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 07:00:17 crc kubenswrapper[4810]: I1203 07:00:17.225507 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podUID="3dcd261975c3d6b9a6ad6367fd4facd3" containerName="kube-scheduler" probeResult="failure" output="Get \"https://192.168.126.11:10259/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:00:22 crc kubenswrapper[4810]: I1203 07:00:22.377326 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 07:00:22 crc kubenswrapper[4810]: E1203 07:00:22.377986 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 07:00:25 crc kubenswrapper[4810]: I1203 07:00:25.426983 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/util/0.log" Dec 03 07:00:25 crc kubenswrapper[4810]: I1203 07:00:25.596693 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/util/0.log" Dec 03 07:00:25 crc kubenswrapper[4810]: I1203 07:00:25.601383 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/pull/0.log" Dec 03 07:00:25 crc kubenswrapper[4810]: I1203 07:00:25.614469 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/pull/0.log" Dec 03 07:00:25 crc kubenswrapper[4810]: I1203 07:00:25.769304 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/pull/0.log" Dec 03 07:00:25 crc kubenswrapper[4810]: I1203 07:00:25.781558 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/extract/0.log" Dec 03 07:00:25 crc kubenswrapper[4810]: I1203 07:00:25.796575 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864dpl7r_6e9cc58b-08e2-4bf4-be95-0b3c437559a1/util/0.log" Dec 03 07:00:25 crc kubenswrapper[4810]: I1203 07:00:25.937438 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-k8whl_93016dff-dd26-4447-bb03-244d51ba4154/kube-rbac-proxy/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.010229 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wbdfg_4b9517ac-6af4-40eb-a049-7b778dcc5f10/kube-rbac-proxy/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.014333 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-k8whl_93016dff-dd26-4447-bb03-244d51ba4154/manager/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.139665 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wbdfg_4b9517ac-6af4-40eb-a049-7b778dcc5f10/manager/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.172006 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-mpgcv_7cf67e34-abd6-4424-95f4-7654ac840108/kube-rbac-proxy/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.188561 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-mpgcv_7cf67e34-abd6-4424-95f4-7654ac840108/manager/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.320915 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-h9f8m_af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d/kube-rbac-proxy/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.392375 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-h9f8m_af3ca9ee-5379-49c7-9dd4-c4ab0b1c9d5d/manager/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.505539 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-64665_101772d6-6540-4695-a13f-ab0ce9a4bff2/kube-rbac-proxy/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.538000 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-zclnr_7563ba12-e36d-48b2-8d43-57435fe85d0e/kube-rbac-proxy/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 
07:00:26.542037 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-64665_101772d6-6540-4695-a13f-ab0ce9a4bff2/manager/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.869171 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-zclnr_7563ba12-e36d-48b2-8d43-57435fe85d0e/manager/0.log" Dec 03 07:00:26 crc kubenswrapper[4810]: I1203 07:00:26.909940 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-fdct7_4517c669-2df4-40be-bcc1-0b44fa11838d/kube-rbac-proxy/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.049845 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-fdct7_4517c669-2df4-40be-bcc1-0b44fa11838d/manager/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.068155 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-pds6l_3de263f5-25e9-41a0-a51d-37317cb65b16/kube-rbac-proxy/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.100900 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-pds6l_3de263f5-25e9-41a0-a51d-37317cb65b16/manager/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.226625 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b9hqh_b36e1f29-d534-4c72-bcac-74ffc356c086/kube-rbac-proxy/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.310558 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b9hqh_b36e1f29-d534-4c72-bcac-74ffc356c086/manager/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.415653 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-pzmcl_60612556-0f2a-4999-afb7-d71b32d18ef0/kube-rbac-proxy/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.451017 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-pzmcl_60612556-0f2a-4999-afb7-d71b32d18ef0/manager/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.532422 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-km59q_68f5962b-45be-45a4-9822-eb23088d3d79/kube-rbac-proxy/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.603231 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-km59q_68f5962b-45be-45a4-9822-eb23088d3d79/manager/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.711368 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-vs4m6_ac2ef7b5-018c-4775-8e14-106265e1c300/kube-rbac-proxy/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.755450 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-vs4m6_ac2ef7b5-018c-4775-8e14-106265e1c300/manager/0.log" Dec 03 07:00:27 crc 
kubenswrapper[4810]: I1203 07:00:27.978301 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-5lkc2_fa52e238-d025-4845-85bb-2787a7eb2ed7/kube-rbac-proxy/0.log" Dec 03 07:00:27 crc kubenswrapper[4810]: I1203 07:00:27.994973 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-5lkc2_fa52e238-d025-4845-85bb-2787a7eb2ed7/manager/0.log" Dec 03 07:00:28 crc kubenswrapper[4810]: I1203 07:00:28.027040 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-z6tqk_660ec0b8-77cd-4cb2-9597-abca0770fbf9/kube-rbac-proxy/0.log" Dec 03 07:00:28 crc kubenswrapper[4810]: I1203 07:00:28.171118 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-z6tqk_660ec0b8-77cd-4cb2-9597-abca0770fbf9/manager/0.log" Dec 03 07:00:28 crc kubenswrapper[4810]: I1203 07:00:28.192660 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55d86b6686v7m42_7306b11b-b539-4542-af3f-a738880af67f/kube-rbac-proxy/0.log" Dec 03 07:00:28 crc kubenswrapper[4810]: I1203 07:00:28.229270 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55d86b6686v7m42_7306b11b-b539-4542-af3f-a738880af67f/manager/0.log" Dec 03 07:00:28 crc kubenswrapper[4810]: I1203 07:00:28.634197 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dd5c7bb7c-9ck7g_ce9634a4-b14f-4972-a2f8-3bcea4db7a43/operator/0.log" Dec 03 07:00:28 crc kubenswrapper[4810]: I1203 07:00:28.711015 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-mmn6k_6a2591e5-764f-4e99-90d5-c0942ee5c434/registry-server/0.log" Dec 03 07:00:28 crc kubenswrapper[4810]: I1203 07:00:28.817410 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-lvc9h_70b45fee-d617-41b2-a598-eae2815e19c6/kube-rbac-proxy/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.009374 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-lvc9h_70b45fee-d617-41b2-a598-eae2815e19c6/manager/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.062788 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-2hkc4_4a806c3b-f888-4612-b979-9f57fa2adabe/manager/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.076386 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-2hkc4_4a806c3b-f888-4612-b979-9f57fa2adabe/kube-rbac-proxy/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.321458 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-7nw2d_2f985dd7-de9f-498f-a297-f0602a4888a4/operator/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.341234 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-r42sr_f8e032e8-8552-4d00-861c-798b4e59b83e/kube-rbac-proxy/0.log" Dec 
03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.503497 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-r42sr_f8e032e8-8552-4d00-861c-798b4e59b83e/manager/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.551593 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-2ktb9_13befddb-d7f2-48bb-9d8c-8e61fbd8601a/kube-rbac-proxy/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.566751 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-9f56fc979-kq5vk_a67e7123-c5dc-4392-9296-02892458e969/manager/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.638760 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-2ktb9_13befddb-d7f2-48bb-9d8c-8e61fbd8601a/manager/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.752123 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-md8gl_799ba5c1-1eae-4a8a-9177-454e5bcba2a5/manager/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.769663 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-md8gl_799ba5c1-1eae-4a8a-9177-454e5bcba2a5/kube-rbac-proxy/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.826767 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-hw6bt_5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462/kube-rbac-proxy/0.log" Dec 03 07:00:29 crc kubenswrapper[4810]: I1203 07:00:29.911944 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-hw6bt_5a9a43f0-e1ee-41ce-ab07-ab9f9abe5462/manager/0.log" Dec 03 07:00:30 crc kubenswrapper[4810]: I1203 07:00:30.972660 4810 scope.go:117] "RemoveContainer" containerID="938246c49c78e60dd21dde994d0a564ebca4d10ff16861206703cea066c37df8" Dec 03 07:00:37 crc kubenswrapper[4810]: I1203 07:00:37.378129 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 07:00:37 crc kubenswrapper[4810]: E1203 07:00:37.379118 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" Dec 03 07:00:48 crc kubenswrapper[4810]: I1203 07:00:48.378157 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 07:00:48 crc kubenswrapper[4810]: E1203 07:00:48.378911 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2hd85_openshift-machine-config-operator(bc7906ed-7d0a-444b-8e14-12c67bc3301e)\"" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" 
Dec 03 07:00:48 crc kubenswrapper[4810]: I1203 07:00:48.749384 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-wgxmb_e11bccd1-94c0-4366-9420-6295008b823d/control-plane-machine-set-operator/0.log" Dec 03 07:00:48 crc kubenswrapper[4810]: I1203 07:00:48.916561 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fvwpx_fe9221b2-2ee0-4fa7-846f-f37559bf8631/kube-rbac-proxy/0.log" Dec 03 07:00:48 crc kubenswrapper[4810]: I1203 07:00:48.935584 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fvwpx_fe9221b2-2ee0-4fa7-846f-f37559bf8631/machine-api-operator/0.log" Dec 03 07:00:59 crc kubenswrapper[4810]: I1203 07:00:59.377136 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 07:00:59 crc kubenswrapper[4810]: I1203 07:00:59.679269 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"dbcf107a210ea6b30b33641897e4e8e02bd8409638d7aa3bcc20f85c3b6914da"} Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.157870 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29412421-9bc25"] Dec 03 07:01:00 crc kubenswrapper[4810]: E1203 07:01:00.158481 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aabf11d-12b3-4d52-bbf2-98ea6886227c" containerName="collect-profiles" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.158517 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aabf11d-12b3-4d52-bbf2-98ea6886227c" containerName="collect-profiles" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.158873 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aabf11d-12b3-4d52-bbf2-98ea6886227c" containerName="collect-profiles" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.160043 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.173417 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29412421-9bc25"] Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.288348 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-combined-ca-bundle\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.288410 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-config-data\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.288468 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjkst\" (UniqueName: \"kubernetes.io/projected/9b3dad17-c011-4c7c-b991-b3178354c467-kube-api-access-bjkst\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.288529 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-fernet-keys\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.390638 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-combined-ca-bundle\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.390702 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-config-data\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.390777 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjkst\" (UniqueName: \"kubernetes.io/projected/9b3dad17-c011-4c7c-b991-b3178354c467-kube-api-access-bjkst\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.391966 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-fernet-keys\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.399027 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-config-data\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.400065 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-fernet-keys\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.412844 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjkst\" (UniqueName: \"kubernetes.io/projected/9b3dad17-c011-4c7c-b991-b3178354c467-kube-api-access-bjkst\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.413606 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-combined-ca-bundle\") pod \"keystone-cron-29412421-9bc25\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:00 crc kubenswrapper[4810]: I1203 07:01:00.502043 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:01 crc kubenswrapper[4810]: I1203 07:01:01.069470 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29412421-9bc25"] Dec 03 07:01:01 crc kubenswrapper[4810]: W1203 07:01:01.079423 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b3dad17_c011_4c7c_b991_b3178354c467.slice/crio-acdd64bc77167965f3801e1ea067532ce5b29fadf55fc0b3d7a8b6c78943647a WatchSource:0}: Error finding container acdd64bc77167965f3801e1ea067532ce5b29fadf55fc0b3d7a8b6c78943647a: Status 404 returned error can't find the container with id acdd64bc77167965f3801e1ea067532ce5b29fadf55fc0b3d7a8b6c78943647a Dec 03 07:01:01 crc kubenswrapper[4810]: I1203 07:01:01.718320 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412421-9bc25" event={"ID":"9b3dad17-c011-4c7c-b991-b3178354c467","Type":"ContainerStarted","Data":"2af5d9bbe01f1d6425724b8fcf3c369d303f7487c83e947f3cd181eba13d8388"} Dec 03 07:01:01 crc kubenswrapper[4810]: I1203 07:01:01.718572 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412421-9bc25" event={"ID":"9b3dad17-c011-4c7c-b991-b3178354c467","Type":"ContainerStarted","Data":"acdd64bc77167965f3801e1ea067532ce5b29fadf55fc0b3d7a8b6c78943647a"} Dec 03 07:01:01 crc kubenswrapper[4810]: I1203 07:01:01.732008 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29412421-9bc25" podStartSLOduration=1.731991058 podStartE2EDuration="1.731991058s" podCreationTimestamp="2025-12-03 07:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:01:01.731258769 +0000 UTC m=+4785.666719600" watchObservedRunningTime="2025-12-03 07:01:01.731991058 +0000 UTC m=+4785.667451899" Dec 03 07:01:03 crc kubenswrapper[4810]: I1203 07:01:03.120029 4810 
log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-sb4vh_30155c9d-effd-4dd4-8893-afcf98aa730d/cert-manager-controller/0.log" Dec 03 07:01:03 crc kubenswrapper[4810]: I1203 07:01:03.344541 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-cg57k_5bac17b1-b4d3-423f-8303-219d40d0c765/cert-manager-webhook/0.log" Dec 03 07:01:03 crc kubenswrapper[4810]: I1203 07:01:03.380932 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-9d7bx_d895f771-5889-476b-9f46-1a2930561552/cert-manager-cainjector/0.log" Dec 03 07:01:04 crc kubenswrapper[4810]: I1203 07:01:04.744577 4810 generic.go:334] "Generic (PLEG): container finished" podID="9b3dad17-c011-4c7c-b991-b3178354c467" containerID="2af5d9bbe01f1d6425724b8fcf3c369d303f7487c83e947f3cd181eba13d8388" exitCode=0 Dec 03 07:01:04 crc kubenswrapper[4810]: I1203 07:01:04.744918 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412421-9bc25" event={"ID":"9b3dad17-c011-4c7c-b991-b3178354c467","Type":"ContainerDied","Data":"2af5d9bbe01f1d6425724b8fcf3c369d303f7487c83e947f3cd181eba13d8388"} Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.096100 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.207621 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjkst\" (UniqueName: \"kubernetes.io/projected/9b3dad17-c011-4c7c-b991-b3178354c467-kube-api-access-bjkst\") pod \"9b3dad17-c011-4c7c-b991-b3178354c467\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.207685 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-config-data\") pod \"9b3dad17-c011-4c7c-b991-b3178354c467\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.207848 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-fernet-keys\") pod \"9b3dad17-c011-4c7c-b991-b3178354c467\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.207910 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-combined-ca-bundle\") pod \"9b3dad17-c011-4c7c-b991-b3178354c467\" (UID: \"9b3dad17-c011-4c7c-b991-b3178354c467\") " Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.227696 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9b3dad17-c011-4c7c-b991-b3178354c467" (UID: "9b3dad17-c011-4c7c-b991-b3178354c467"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.227874 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b3dad17-c011-4c7c-b991-b3178354c467-kube-api-access-bjkst" (OuterVolumeSpecName: "kube-api-access-bjkst") pod "9b3dad17-c011-4c7c-b991-b3178354c467" (UID: "9b3dad17-c011-4c7c-b991-b3178354c467"). InnerVolumeSpecName "kube-api-access-bjkst". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.240022 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b3dad17-c011-4c7c-b991-b3178354c467" (UID: "9b3dad17-c011-4c7c-b991-b3178354c467"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.261060 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-config-data" (OuterVolumeSpecName: "config-data") pod "9b3dad17-c011-4c7c-b991-b3178354c467" (UID: "9b3dad17-c011-4c7c-b991-b3178354c467"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.309778 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjkst\" (UniqueName: \"kubernetes.io/projected/9b3dad17-c011-4c7c-b991-b3178354c467-kube-api-access-bjkst\") on node \"crc\" DevicePath \"\"" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.309820 4810 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.309835 4810 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.309847 4810 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b3dad17-c011-4c7c-b991-b3178354c467-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.764123 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412421-9bc25" event={"ID":"9b3dad17-c011-4c7c-b991-b3178354c467","Type":"ContainerDied","Data":"acdd64bc77167965f3801e1ea067532ce5b29fadf55fc0b3d7a8b6c78943647a"} Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.764178 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="acdd64bc77167965f3801e1ea067532ce5b29fadf55fc0b3d7a8b6c78943647a" Dec 03 07:01:06 crc kubenswrapper[4810]: I1203 07:01:06.764475 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29412421-9bc25" Dec 03 07:01:16 crc kubenswrapper[4810]: I1203 07:01:16.801156 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-6drbj_aeb379d5-f663-4858-a0b5-27614ecf83e9/nmstate-console-plugin/0.log" Dec 03 07:01:16 crc kubenswrapper[4810]: I1203 07:01:16.976338 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-wvjvm_a70a2c16-7c9a-45aa-b91d-7d6f44b821c6/nmstate-handler/0.log" Dec 03 07:01:17 crc kubenswrapper[4810]: I1203 07:01:17.015045 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ln57t_430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b/kube-rbac-proxy/0.log" Dec 03 07:01:17 crc kubenswrapper[4810]: I1203 07:01:17.021917 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ln57t_430bfc6b-2d80-4c02-8b7e-7e7eb7880b2b/nmstate-metrics/0.log" Dec 03 07:01:17 crc kubenswrapper[4810]: I1203 07:01:17.145629 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-ppp7c_0e12845f-64f7-48a4-8297-25f3eef40777/nmstate-operator/0.log" Dec 03 07:01:17 crc kubenswrapper[4810]: I1203 07:01:17.189688 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-sqp7g_283887bd-09bf-4f88-81f4-efb5ade1b9de/nmstate-webhook/0.log" Dec 03 07:01:34 crc kubenswrapper[4810]: I1203 07:01:34.458552 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-6nc6q_edef3e76-a314-4543-9a0b-592d76cbed2a/kube-rbac-proxy/0.log" Dec 03 07:01:34 crc kubenswrapper[4810]: I1203 07:01:34.592356 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-6nc6q_edef3e76-a314-4543-9a0b-592d76cbed2a/controller/0.log" Dec 03 07:01:34 crc kubenswrapper[4810]: I1203 07:01:34.757089 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-frr-files/0.log" Dec 03 07:01:34 crc kubenswrapper[4810]: I1203 07:01:34.931827 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-frr-files/0.log" Dec 03 07:01:34 crc kubenswrapper[4810]: I1203 07:01:34.982056 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-reloader/0.log" Dec 03 07:01:34 crc kubenswrapper[4810]: I1203 07:01:34.983912 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-reloader/0.log" Dec 03 07:01:34 crc kubenswrapper[4810]: I1203 07:01:34.997170 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-metrics/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.201649 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-frr-files/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.237605 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-metrics/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.237789 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-metrics/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.276246 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-reloader/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.451530 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-reloader/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.453717 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-frr-files/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.471351 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/controller/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.473834 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/cp-metrics/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.633903 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/frr-metrics/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.672093 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/kube-rbac-proxy/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.725396 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/kube-rbac-proxy-frr/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.889274 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/reloader/0.log" Dec 03 07:01:35 crc kubenswrapper[4810]: I1203 07:01:35.975768 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-5j7rx_861b1b72-05ca-4e91-a015-64939a072ec2/frr-k8s-webhook-server/0.log" Dec 03 07:01:36 crc kubenswrapper[4810]: I1203 07:01:36.110011 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-579cbb8d94-dxs7n_43572253-c54e-4ba2-b805-7a9521a015cd/manager/0.log" Dec 03 07:01:36 crc kubenswrapper[4810]: I1203 07:01:36.434642 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6dfb784c-hjvnl_5406f1e2-94a8-4c3c-b154-e1448775314a/webhook-server/0.log" Dec 03 07:01:36 crc kubenswrapper[4810]: I1203 07:01:36.591704 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hpcxn_1bf9e275-7fc1-43d1-b95a-b19e459fda0c/kube-rbac-proxy/0.log" Dec 03 07:01:36 crc kubenswrapper[4810]: I1203 07:01:36.642991 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-klmlk_953ec342-87c7-4803-96cf-bf3e6e4592aa/frr/0.log" Dec 03 07:01:37 crc kubenswrapper[4810]: I1203 07:01:37.005675 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hpcxn_1bf9e275-7fc1-43d1-b95a-b19e459fda0c/speaker/0.log" Dec 03 07:01:51 crc kubenswrapper[4810]: I1203 07:01:51.911186 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/util/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.050964 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/util/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.089051 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/pull/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.144543 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/pull/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.338403 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/pull/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.358086 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/extract/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.393161 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fvxgqz_f9912c00-ee4e-47fa-a724-f7518c8c61b6/util/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.496273 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/util/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.684274 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/pull/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.701422 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/util/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.732391 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/pull/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.880302 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/util/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.883339 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/extract/0.log" Dec 03 07:01:52 crc kubenswrapper[4810]: I1203 07:01:52.885721 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83cj44t_fc6d4c38-ea04-420d-b505-92115a81cb40/pull/0.log" Dec 03 07:01:53 crc kubenswrapper[4810]: I1203 07:01:53.744512 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-utilities/0.log" Dec 03 07:01:53 crc kubenswrapper[4810]: I1203 07:01:53.961379 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-utilities/0.log" Dec 03 07:01:53 crc kubenswrapper[4810]: I1203 07:01:53.983150 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-content/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.007393 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-content/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.146446 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-utilities/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.184392 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/extract-content/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.506483 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-utilities/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.706910 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tr27d_14e683a3-a7b9-4f89-a03d-bdda8882df24/registry-server/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.751590 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-content/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.770771 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-utilities/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.777368 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-content/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.926223 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-utilities/0.log" Dec 03 07:01:54 crc kubenswrapper[4810]: I1203 07:01:54.935384 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/extract-content/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.169162 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-xwgs5_1de26693-7bac-4062-8ed2-d7f84510be17/marketplace-operator/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.262715 4810 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-utilities/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.428822 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-content/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.472961 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-utilities/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.484298 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-content/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.580367 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5xcsg_21d02e76-e70c-46ad-848f-bfdee9c9f81f/registry-server/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.759963 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-content/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.820083 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/registry-server/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.854991 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-utilities/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.858213 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qbzdc_dc2d13cd-9117-4326-a6b1-3bdbcf6b81f7/extract-utilities/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.965439 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-content/0.log" Dec 03 07:01:55 crc kubenswrapper[4810]: I1203 07:01:55.991209 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-utilities/0.log" Dec 03 07:01:56 crc kubenswrapper[4810]: I1203 07:01:56.025664 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-content/0.log" Dec 03 07:01:56 crc kubenswrapper[4810]: I1203 07:01:56.215503 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-utilities/0.log" Dec 03 07:01:56 crc kubenswrapper[4810]: I1203 07:01:56.217009 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/extract-content/0.log" Dec 03 07:01:56 crc kubenswrapper[4810]: I1203 07:01:56.813206 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hmz68_f2171748-a3a1-4f3a-b0b4-b0e5ab8d588f/registry-server/0.log" Dec 03 07:02:30 crc kubenswrapper[4810]: E1203 07:02:30.547022 4810 upgradeaware.go:427] Error proxying data from client to 
backend: readfrom tcp 38.102.83.23:41448->38.102.83.23:39931: write tcp 38.102.83.23:41448->38.102.83.23:39931: write: broken pipe Dec 03 07:03:25 crc kubenswrapper[4810]: I1203 07:03:25.677072 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:03:25 crc kubenswrapper[4810]: I1203 07:03:25.677655 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:03:36 crc kubenswrapper[4810]: I1203 07:03:36.353718 4810 generic.go:334] "Generic (PLEG): container finished" podID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerID="fc50315b6f65b6d4d67f7816df90478bb1d57e7c3a3906f957ce28df770531ca" exitCode=0 Dec 03 07:03:36 crc kubenswrapper[4810]: I1203 07:03:36.353843 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" event={"ID":"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc","Type":"ContainerDied","Data":"fc50315b6f65b6d4d67f7816df90478bb1d57e7c3a3906f957ce28df770531ca"} Dec 03 07:03:36 crc kubenswrapper[4810]: I1203 07:03:36.354845 4810 scope.go:117] "RemoveContainer" containerID="fc50315b6f65b6d4d67f7816df90478bb1d57e7c3a3906f957ce28df770531ca" Dec 03 07:03:37 crc kubenswrapper[4810]: I1203 07:03:37.371959 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wgrf4_must-gather-xxkv2_a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc/gather/0.log" Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.231238 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wgrf4/must-gather-xxkv2"] Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.232103 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" podUID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerName="copy" containerID="cri-o://9cb2fd8476445d1caff2de8b1c57140b57e0d0670b8669555892f93a8048159f" gracePeriod=2 Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.241537 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wgrf4/must-gather-xxkv2"] Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.514505 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wgrf4_must-gather-xxkv2_a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc/copy/0.log" Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.515332 4810 generic.go:334] "Generic (PLEG): container finished" podID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerID="9cb2fd8476445d1caff2de8b1c57140b57e0d0670b8669555892f93a8048159f" exitCode=143 Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.667225 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wgrf4_must-gather-xxkv2_a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc/copy/0.log" Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.667653 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.732890 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-must-gather-output\") pod \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\" (UID: \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\") " Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.732953 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmtjd\" (UniqueName: \"kubernetes.io/projected/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-kube-api-access-jmtjd\") pod \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\" (UID: \"a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc\") " Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.738868 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-kube-api-access-jmtjd" (OuterVolumeSpecName: "kube-api-access-jmtjd") pod "a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" (UID: "a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc"). InnerVolumeSpecName "kube-api-access-jmtjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.835775 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmtjd\" (UniqueName: \"kubernetes.io/projected/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-kube-api-access-jmtjd\") on node \"crc\" DevicePath \"\"" Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.882501 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" (UID: "a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:03:48 crc kubenswrapper[4810]: I1203 07:03:48.936983 4810 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 07:03:49 crc kubenswrapper[4810]: I1203 07:03:49.539967 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wgrf4_must-gather-xxkv2_a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc/copy/0.log" Dec 03 07:03:49 crc kubenswrapper[4810]: I1203 07:03:49.541222 4810 scope.go:117] "RemoveContainer" containerID="9cb2fd8476445d1caff2de8b1c57140b57e0d0670b8669555892f93a8048159f" Dec 03 07:03:49 crc kubenswrapper[4810]: I1203 07:03:49.541361 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wgrf4/must-gather-xxkv2" Dec 03 07:03:49 crc kubenswrapper[4810]: I1203 07:03:49.579186 4810 scope.go:117] "RemoveContainer" containerID="fc50315b6f65b6d4d67f7816df90478bb1d57e7c3a3906f957ce28df770531ca" Dec 03 07:03:50 crc kubenswrapper[4810]: I1203 07:03:50.391505 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" path="/var/lib/kubelet/pods/a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc/volumes" Dec 03 07:03:55 crc kubenswrapper[4810]: I1203 07:03:55.676953 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:03:55 crc kubenswrapper[4810]: I1203 07:03:55.679146 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:04:25 crc kubenswrapper[4810]: I1203 07:04:25.677168 4810 patch_prober.go:28] interesting pod/machine-config-daemon-2hd85 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:04:25 crc kubenswrapper[4810]: I1203 07:04:25.677719 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:04:25 crc kubenswrapper[4810]: I1203 07:04:25.677783 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" Dec 03 07:04:25 crc kubenswrapper[4810]: I1203 07:04:25.678556 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dbcf107a210ea6b30b33641897e4e8e02bd8409638d7aa3bcc20f85c3b6914da"} pod="openshift-machine-config-operator/machine-config-daemon-2hd85" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 07:04:25 crc kubenswrapper[4810]: I1203 07:04:25.678645 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" podUID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerName="machine-config-daemon" containerID="cri-o://dbcf107a210ea6b30b33641897e4e8e02bd8409638d7aa3bcc20f85c3b6914da" gracePeriod=600 Dec 03 07:04:25 crc kubenswrapper[4810]: I1203 07:04:25.910822 4810 generic.go:334] "Generic (PLEG): container finished" podID="bc7906ed-7d0a-444b-8e14-12c67bc3301e" containerID="dbcf107a210ea6b30b33641897e4e8e02bd8409638d7aa3bcc20f85c3b6914da" exitCode=0 Dec 03 07:04:25 crc kubenswrapper[4810]: I1203 07:04:25.911031 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" 
event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerDied","Data":"dbcf107a210ea6b30b33641897e4e8e02bd8409638d7aa3bcc20f85c3b6914da"} Dec 03 07:04:25 crc kubenswrapper[4810]: I1203 07:04:25.911120 4810 scope.go:117] "RemoveContainer" containerID="e75f4401de1acea1b703065a24838ce23d3cc6cb3518d3e010043289537f7d38" Dec 03 07:04:26 crc kubenswrapper[4810]: I1203 07:04:26.924052 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2hd85" event={"ID":"bc7906ed-7d0a-444b-8e14-12c67bc3301e","Type":"ContainerStarted","Data":"ee15346e51814e8769ddcd45807c3a8941cca247533f7cbe66cfc309b1e0cd99"} Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.827201 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rw54t"] Dec 03 07:04:49 crc kubenswrapper[4810]: E1203 07:04:49.828017 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerName="copy" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.828029 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerName="copy" Dec 03 07:04:49 crc kubenswrapper[4810]: E1203 07:04:49.828058 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b3dad17-c011-4c7c-b991-b3178354c467" containerName="keystone-cron" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.828064 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b3dad17-c011-4c7c-b991-b3178354c467" containerName="keystone-cron" Dec 03 07:04:49 crc kubenswrapper[4810]: E1203 07:04:49.828072 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerName="gather" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.828078 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerName="gather" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.828305 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b3dad17-c011-4c7c-b991-b3178354c467" containerName="keystone-cron" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.828323 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerName="gather" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.828337 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a100cdd1-0e2e-4ed2-b0b7-05743e4c94dc" containerName="copy" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.830113 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.861037 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rw54t"] Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.949636 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-utilities\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.949868 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-catalog-content\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:49 crc kubenswrapper[4810]: I1203 07:04:49.950218 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv4xd\" (UniqueName: \"kubernetes.io/projected/855cc1b9-ceff-4d73-8776-9458e5ece9ec-kube-api-access-qv4xd\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:50 crc kubenswrapper[4810]: I1203 07:04:50.052210 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv4xd\" (UniqueName: \"kubernetes.io/projected/855cc1b9-ceff-4d73-8776-9458e5ece9ec-kube-api-access-qv4xd\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:50 crc kubenswrapper[4810]: I1203 07:04:50.052314 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-utilities\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:50 crc kubenswrapper[4810]: I1203 07:04:50.052356 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-catalog-content\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:50 crc kubenswrapper[4810]: I1203 07:04:50.052888 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-catalog-content\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:50 crc kubenswrapper[4810]: I1203 07:04:50.052984 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-utilities\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:50 crc kubenswrapper[4810]: I1203 07:04:50.080559 4810 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qv4xd\" (UniqueName: \"kubernetes.io/projected/855cc1b9-ceff-4d73-8776-9458e5ece9ec-kube-api-access-qv4xd\") pod \"redhat-marketplace-rw54t\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:50 crc kubenswrapper[4810]: I1203 07:04:50.152294 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:04:50 crc kubenswrapper[4810]: I1203 07:04:50.721605 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rw54t"] Dec 03 07:04:52 crc kubenswrapper[4810]: I1203 07:04:52.187373 4810 generic.go:334] "Generic (PLEG): container finished" podID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerID="65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011" exitCode=0 Dec 03 07:04:52 crc kubenswrapper[4810]: I1203 07:04:52.187417 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rw54t" event={"ID":"855cc1b9-ceff-4d73-8776-9458e5ece9ec","Type":"ContainerDied","Data":"65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011"} Dec 03 07:04:52 crc kubenswrapper[4810]: I1203 07:04:52.187924 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rw54t" event={"ID":"855cc1b9-ceff-4d73-8776-9458e5ece9ec","Type":"ContainerStarted","Data":"0013268f66f3acaf66311a3c6d4909dc9905effa8bd1f7177b4661ebb5155f6a"} Dec 03 07:04:52 crc kubenswrapper[4810]: I1203 07:04:52.191125 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 07:04:53 crc kubenswrapper[4810]: I1203 07:04:53.197297 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rw54t" event={"ID":"855cc1b9-ceff-4d73-8776-9458e5ece9ec","Type":"ContainerStarted","Data":"c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38"} Dec 03 07:04:54 crc kubenswrapper[4810]: I1203 07:04:54.214183 4810 generic.go:334] "Generic (PLEG): container finished" podID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerID="c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38" exitCode=0 Dec 03 07:04:54 crc kubenswrapper[4810]: I1203 07:04:54.214524 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rw54t" event={"ID":"855cc1b9-ceff-4d73-8776-9458e5ece9ec","Type":"ContainerDied","Data":"c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38"} Dec 03 07:04:55 crc kubenswrapper[4810]: I1203 07:04:55.228639 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rw54t" event={"ID":"855cc1b9-ceff-4d73-8776-9458e5ece9ec","Type":"ContainerStarted","Data":"d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37"} Dec 03 07:04:55 crc kubenswrapper[4810]: I1203 07:04:55.258078 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rw54t" podStartSLOduration=3.826424249 podStartE2EDuration="6.25805311s" podCreationTimestamp="2025-12-03 07:04:49 +0000 UTC" firstStartedPulling="2025-12-03 07:04:52.190870758 +0000 UTC m=+5016.126331599" lastFinishedPulling="2025-12-03 07:04:54.622499599 +0000 UTC m=+5018.557960460" observedRunningTime="2025-12-03 07:04:55.24892535 +0000 UTC m=+5019.184386231" watchObservedRunningTime="2025-12-03 07:04:55.25805311 +0000 UTC 
m=+5019.193513951" Dec 03 07:05:00 crc kubenswrapper[4810]: I1203 07:05:00.152499 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:05:00 crc kubenswrapper[4810]: I1203 07:05:00.153124 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:05:00 crc kubenswrapper[4810]: I1203 07:05:00.972814 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:05:01 crc kubenswrapper[4810]: I1203 07:05:01.030189 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:05:01 crc kubenswrapper[4810]: I1203 07:05:01.220268 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rw54t"] Dec 03 07:05:02 crc kubenswrapper[4810]: I1203 07:05:02.306248 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rw54t" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerName="registry-server" containerID="cri-o://d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37" gracePeriod=2 Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.295070 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.318833 4810 generic.go:334] "Generic (PLEG): container finished" podID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerID="d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37" exitCode=0 Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.318882 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rw54t" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.318890 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rw54t" event={"ID":"855cc1b9-ceff-4d73-8776-9458e5ece9ec","Type":"ContainerDied","Data":"d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37"} Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.318997 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rw54t" event={"ID":"855cc1b9-ceff-4d73-8776-9458e5ece9ec","Type":"ContainerDied","Data":"0013268f66f3acaf66311a3c6d4909dc9905effa8bd1f7177b4661ebb5155f6a"} Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.319015 4810 scope.go:117] "RemoveContainer" containerID="d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.322357 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-utilities\") pod \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.322431 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv4xd\" (UniqueName: \"kubernetes.io/projected/855cc1b9-ceff-4d73-8776-9458e5ece9ec-kube-api-access-qv4xd\") pod \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.323251 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-utilities" (OuterVolumeSpecName: "utilities") pod "855cc1b9-ceff-4d73-8776-9458e5ece9ec" (UID: "855cc1b9-ceff-4d73-8776-9458e5ece9ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.323382 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-catalog-content\") pod \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\" (UID: \"855cc1b9-ceff-4d73-8776-9458e5ece9ec\") " Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.323865 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.329280 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/855cc1b9-ceff-4d73-8776-9458e5ece9ec-kube-api-access-qv4xd" (OuterVolumeSpecName: "kube-api-access-qv4xd") pod "855cc1b9-ceff-4d73-8776-9458e5ece9ec" (UID: "855cc1b9-ceff-4d73-8776-9458e5ece9ec"). InnerVolumeSpecName "kube-api-access-qv4xd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.346728 4810 scope.go:117] "RemoveContainer" containerID="c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.361255 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "855cc1b9-ceff-4d73-8776-9458e5ece9ec" (UID: "855cc1b9-ceff-4d73-8776-9458e5ece9ec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.405252 4810 scope.go:117] "RemoveContainer" containerID="65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.425084 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/855cc1b9-ceff-4d73-8776-9458e5ece9ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.426030 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv4xd\" (UniqueName: \"kubernetes.io/projected/855cc1b9-ceff-4d73-8776-9458e5ece9ec-kube-api-access-qv4xd\") on node \"crc\" DevicePath \"\"" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.460406 4810 scope.go:117] "RemoveContainer" containerID="d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37" Dec 03 07:05:03 crc kubenswrapper[4810]: E1203 07:05:03.460869 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37\": container with ID starting with d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37 not found: ID does not exist" containerID="d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.461012 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37"} err="failed to get container status \"d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37\": rpc error: code = NotFound desc = could not find container \"d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37\": container with ID starting with d7bfc4c88b9b25c31255c092e60bbfca379d62f502a75c3cac57f2688a898e37 not found: ID does not exist" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.461092 4810 scope.go:117] "RemoveContainer" containerID="c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38" Dec 03 07:05:03 crc kubenswrapper[4810]: E1203 07:05:03.461485 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38\": container with ID starting with c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38 not found: ID does not exist" containerID="c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.461568 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38"} err="failed to get container 
status \"c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38\": rpc error: code = NotFound desc = could not find container \"c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38\": container with ID starting with c5a1c0829a4694b75c406191f388c640da59a02e29c00f19d5dab57e143c9c38 not found: ID does not exist" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.461607 4810 scope.go:117] "RemoveContainer" containerID="65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011" Dec 03 07:05:03 crc kubenswrapper[4810]: E1203 07:05:03.461959 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011\": container with ID starting with 65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011 not found: ID does not exist" containerID="65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.462024 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011"} err="failed to get container status \"65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011\": rpc error: code = NotFound desc = could not find container \"65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011\": container with ID starting with 65e8abc082ecda7e28c7ba5a94984abe02d0e9302268f257a9a4de5d7b19b011 not found: ID does not exist" Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.652502 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rw54t"] Dec 03 07:05:03 crc kubenswrapper[4810]: I1203 07:05:03.664907 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rw54t"] Dec 03 07:05:04 crc kubenswrapper[4810]: I1203 07:05:04.394817 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" path="/var/lib/kubelet/pods/855cc1b9-ceff-4d73-8776-9458e5ece9ec/volumes" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.648963 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qgrqs"] Dec 03 07:05:22 crc kubenswrapper[4810]: E1203 07:05:22.649977 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerName="registry-server" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.649994 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerName="registry-server" Dec 03 07:05:22 crc kubenswrapper[4810]: E1203 07:05:22.650026 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerName="extract-content" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.650034 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerName="extract-content" Dec 03 07:05:22 crc kubenswrapper[4810]: E1203 07:05:22.650056 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerName="extract-utilities" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.650065 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerName="extract-utilities" Dec 03 07:05:22 crc 
kubenswrapper[4810]: I1203 07:05:22.650299 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="855cc1b9-ceff-4d73-8776-9458e5ece9ec" containerName="registry-server" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.653270 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.670354 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qgrqs"] Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.747375 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-utilities\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.747445 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-catalog-content\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.747537 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxg8b\" (UniqueName: \"kubernetes.io/projected/aad2b258-7c4e-483e-abca-03ba047c4811-kube-api-access-kxg8b\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.849124 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-utilities\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.849216 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-catalog-content\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.849400 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxg8b\" (UniqueName: \"kubernetes.io/projected/aad2b258-7c4e-483e-abca-03ba047c4811-kube-api-access-kxg8b\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.849652 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-utilities\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.849909 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-catalog-content\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.874160 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxg8b\" (UniqueName: \"kubernetes.io/projected/aad2b258-7c4e-483e-abca-03ba047c4811-kube-api-access-kxg8b\") pod \"certified-operators-qgrqs\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:22 crc kubenswrapper[4810]: I1203 07:05:22.985533 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:23 crc kubenswrapper[4810]: I1203 07:05:23.504001 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qgrqs"] Dec 03 07:05:23 crc kubenswrapper[4810]: I1203 07:05:23.637595 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgrqs" event={"ID":"aad2b258-7c4e-483e-abca-03ba047c4811","Type":"ContainerStarted","Data":"b4b6b6db8ce304af805ee4e02e8c5f20d2b595d84b82ed61bedf8e63b687d788"} Dec 03 07:05:24 crc kubenswrapper[4810]: I1203 07:05:24.646921 4810 generic.go:334] "Generic (PLEG): container finished" podID="aad2b258-7c4e-483e-abca-03ba047c4811" containerID="5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4" exitCode=0 Dec 03 07:05:24 crc kubenswrapper[4810]: I1203 07:05:24.646972 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgrqs" event={"ID":"aad2b258-7c4e-483e-abca-03ba047c4811","Type":"ContainerDied","Data":"5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4"} Dec 03 07:05:25 crc kubenswrapper[4810]: I1203 07:05:25.665107 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgrqs" event={"ID":"aad2b258-7c4e-483e-abca-03ba047c4811","Type":"ContainerStarted","Data":"71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72"} Dec 03 07:05:26 crc kubenswrapper[4810]: I1203 07:05:26.676667 4810 generic.go:334] "Generic (PLEG): container finished" podID="aad2b258-7c4e-483e-abca-03ba047c4811" containerID="71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72" exitCode=0 Dec 03 07:05:26 crc kubenswrapper[4810]: I1203 07:05:26.676729 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgrqs" event={"ID":"aad2b258-7c4e-483e-abca-03ba047c4811","Type":"ContainerDied","Data":"71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72"} Dec 03 07:05:27 crc kubenswrapper[4810]: I1203 07:05:27.687439 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgrqs" event={"ID":"aad2b258-7c4e-483e-abca-03ba047c4811","Type":"ContainerStarted","Data":"de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641"} Dec 03 07:05:27 crc kubenswrapper[4810]: I1203 07:05:27.715377 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qgrqs" podStartSLOduration=3.303263438 podStartE2EDuration="5.715355334s" podCreationTimestamp="2025-12-03 07:05:22 +0000 UTC" firstStartedPulling="2025-12-03 07:05:24.64884751 +0000 UTC m=+5048.584308351" lastFinishedPulling="2025-12-03 07:05:27.060939406 
+0000 UTC m=+5050.996400247" observedRunningTime="2025-12-03 07:05:27.704268802 +0000 UTC m=+5051.639729643" watchObservedRunningTime="2025-12-03 07:05:27.715355334 +0000 UTC m=+5051.650816175" Dec 03 07:05:31 crc kubenswrapper[4810]: I1203 07:05:31.142576 4810 scope.go:117] "RemoveContainer" containerID="24b3aada9f80e63b096a0c81b308b270f15404bbcd3d7e2044c090e3d52f8e12" Dec 03 07:05:32 crc kubenswrapper[4810]: I1203 07:05:32.986581 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:32 crc kubenswrapper[4810]: I1203 07:05:32.987069 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:33 crc kubenswrapper[4810]: I1203 07:05:33.056970 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:33 crc kubenswrapper[4810]: I1203 07:05:33.833139 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:33 crc kubenswrapper[4810]: I1203 07:05:33.931673 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qgrqs"] Dec 03 07:05:35 crc kubenswrapper[4810]: I1203 07:05:35.774461 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qgrqs" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" containerName="registry-server" containerID="cri-o://de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641" gracePeriod=2 Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.742190 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.792865 4810 generic.go:334] "Generic (PLEG): container finished" podID="aad2b258-7c4e-483e-abca-03ba047c4811" containerID="de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641" exitCode=0 Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.792996 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qgrqs" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.792978 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgrqs" event={"ID":"aad2b258-7c4e-483e-abca-03ba047c4811","Type":"ContainerDied","Data":"de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641"} Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.793175 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgrqs" event={"ID":"aad2b258-7c4e-483e-abca-03ba047c4811","Type":"ContainerDied","Data":"b4b6b6db8ce304af805ee4e02e8c5f20d2b595d84b82ed61bedf8e63b687d788"} Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.793207 4810 scope.go:117] "RemoveContainer" containerID="de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.823798 4810 scope.go:117] "RemoveContainer" containerID="71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.837289 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-utilities\") pod \"aad2b258-7c4e-483e-abca-03ba047c4811\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.837462 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxg8b\" (UniqueName: \"kubernetes.io/projected/aad2b258-7c4e-483e-abca-03ba047c4811-kube-api-access-kxg8b\") pod \"aad2b258-7c4e-483e-abca-03ba047c4811\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.837542 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-catalog-content\") pod \"aad2b258-7c4e-483e-abca-03ba047c4811\" (UID: \"aad2b258-7c4e-483e-abca-03ba047c4811\") " Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.838713 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-utilities" (OuterVolumeSpecName: "utilities") pod "aad2b258-7c4e-483e-abca-03ba047c4811" (UID: "aad2b258-7c4e-483e-abca-03ba047c4811"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.846001 4810 scope.go:117] "RemoveContainer" containerID="5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.846022 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aad2b258-7c4e-483e-abca-03ba047c4811-kube-api-access-kxg8b" (OuterVolumeSpecName: "kube-api-access-kxg8b") pod "aad2b258-7c4e-483e-abca-03ba047c4811" (UID: "aad2b258-7c4e-483e-abca-03ba047c4811"). InnerVolumeSpecName "kube-api-access-kxg8b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.889713 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aad2b258-7c4e-483e-abca-03ba047c4811" (UID: "aad2b258-7c4e-483e-abca-03ba047c4811"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.939305 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.939339 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad2b258-7c4e-483e-abca-03ba047c4811-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.939354 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxg8b\" (UniqueName: \"kubernetes.io/projected/aad2b258-7c4e-483e-abca-03ba047c4811-kube-api-access-kxg8b\") on node \"crc\" DevicePath \"\"" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.940753 4810 scope.go:117] "RemoveContainer" containerID="de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641" Dec 03 07:05:36 crc kubenswrapper[4810]: E1203 07:05:36.941307 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641\": container with ID starting with de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641 not found: ID does not exist" containerID="de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.941357 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641"} err="failed to get container status \"de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641\": rpc error: code = NotFound desc = could not find container \"de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641\": container with ID starting with de0c9b5a239e835e3768ef85169ed6ea605730ba8be4b7c0c5b3f70a7a772641 not found: ID does not exist" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.941398 4810 scope.go:117] "RemoveContainer" containerID="71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72" Dec 03 07:05:36 crc kubenswrapper[4810]: E1203 07:05:36.941711 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72\": container with ID starting with 71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72 not found: ID does not exist" containerID="71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.941772 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72"} err="failed to get container status \"71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72\": rpc error: code = 
NotFound desc = could not find container \"71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72\": container with ID starting with 71ab972717ff43de9be1d67e45633528164ab912121bbb6e1a766ef4b6a02b72 not found: ID does not exist" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.941793 4810 scope.go:117] "RemoveContainer" containerID="5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4" Dec 03 07:05:36 crc kubenswrapper[4810]: E1203 07:05:36.942155 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4\": container with ID starting with 5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4 not found: ID does not exist" containerID="5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4" Dec 03 07:05:36 crc kubenswrapper[4810]: I1203 07:05:36.942181 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4"} err="failed to get container status \"5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4\": rpc error: code = NotFound desc = could not find container \"5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4\": container with ID starting with 5d78b4f5c62d05d419021c8c579c11169dc9512c0aff67f5191167263729d7a4 not found: ID does not exist" Dec 03 07:05:37 crc kubenswrapper[4810]: I1203 07:05:37.132640 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qgrqs"] Dec 03 07:05:37 crc kubenswrapper[4810]: I1203 07:05:37.141617 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qgrqs"] Dec 03 07:05:38 crc kubenswrapper[4810]: I1203 07:05:38.393279 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" path="/var/lib/kubelet/pods/aad2b258-7c4e-483e-abca-03ba047c4811/volumes" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.456114 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5qs9l"] Dec 03 07:06:40 crc kubenswrapper[4810]: E1203 07:06:40.457238 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" containerName="extract-utilities" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.457260 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" containerName="extract-utilities" Dec 03 07:06:40 crc kubenswrapper[4810]: E1203 07:06:40.457281 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" containerName="extract-content" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.457293 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" containerName="extract-content" Dec 03 07:06:40 crc kubenswrapper[4810]: E1203 07:06:40.457327 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" containerName="registry-server" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.457339 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" containerName="registry-server" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.457696 4810 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="aad2b258-7c4e-483e-abca-03ba047c4811" containerName="registry-server" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.459996 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.479568 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5qs9l"] Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.612844 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpqbn\" (UniqueName: \"kubernetes.io/projected/e78475a7-f113-4c92-bbd0-33af8c581176-kube-api-access-gpqbn\") pod \"community-operators-5qs9l\" (UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.612957 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e78475a7-f113-4c92-bbd0-33af8c581176-utilities\") pod \"community-operators-5qs9l\" (UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.612975 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e78475a7-f113-4c92-bbd0-33af8c581176-catalog-content\") pod \"community-operators-5qs9l\" (UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.714485 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpqbn\" (UniqueName: \"kubernetes.io/projected/e78475a7-f113-4c92-bbd0-33af8c581176-kube-api-access-gpqbn\") pod \"community-operators-5qs9l\" (UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.714584 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e78475a7-f113-4c92-bbd0-33af8c581176-utilities\") pod \"community-operators-5qs9l\" (UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.714607 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e78475a7-f113-4c92-bbd0-33af8c581176-catalog-content\") pod \"community-operators-5qs9l\" (UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.715125 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e78475a7-f113-4c92-bbd0-33af8c581176-utilities\") pod \"community-operators-5qs9l\" (UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.715142 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e78475a7-f113-4c92-bbd0-33af8c581176-catalog-content\") pod \"community-operators-5qs9l\" 
(UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.757781 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpqbn\" (UniqueName: \"kubernetes.io/projected/e78475a7-f113-4c92-bbd0-33af8c581176-kube-api-access-gpqbn\") pod \"community-operators-5qs9l\" (UID: \"e78475a7-f113-4c92-bbd0-33af8c581176\") " pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:40 crc kubenswrapper[4810]: I1203 07:06:40.808586 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5qs9l" Dec 03 07:06:41 crc kubenswrapper[4810]: I1203 07:06:41.311213 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5qs9l"] Dec 03 07:06:41 crc kubenswrapper[4810]: I1203 07:06:41.480722 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qs9l" event={"ID":"e78475a7-f113-4c92-bbd0-33af8c581176","Type":"ContainerStarted","Data":"ceb712eb1c445b2dd2877ec3bde57d2048874a643a94ce00f03a5e3c7a873dca"} Dec 03 07:06:42 crc kubenswrapper[4810]: I1203 07:06:42.491958 4810 generic.go:334] "Generic (PLEG): container finished" podID="e78475a7-f113-4c92-bbd0-33af8c581176" containerID="82a57476672b8924f101270e44a87b23c49520a77667765fbdbf003085302cbb" exitCode=0 Dec 03 07:06:42 crc kubenswrapper[4810]: I1203 07:06:42.492090 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qs9l" event={"ID":"e78475a7-f113-4c92-bbd0-33af8c581176","Type":"ContainerDied","Data":"82a57476672b8924f101270e44a87b23c49520a77667765fbdbf003085302cbb"} Dec 03 07:06:43 crc kubenswrapper[4810]: I1203 07:06:43.505508 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qs9l" event={"ID":"e78475a7-f113-4c92-bbd0-33af8c581176","Type":"ContainerStarted","Data":"e280ca658df7c4cc3a3ca4c5928e9dbca220276704398ec8a3ad3ec34bc04a31"} Dec 03 07:06:44 crc kubenswrapper[4810]: I1203 07:06:44.515403 4810 generic.go:334] "Generic (PLEG): container finished" podID="e78475a7-f113-4c92-bbd0-33af8c581176" containerID="e280ca658df7c4cc3a3ca4c5928e9dbca220276704398ec8a3ad3ec34bc04a31" exitCode=0 Dec 03 07:06:44 crc kubenswrapper[4810]: I1203 07:06:44.515443 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qs9l" event={"ID":"e78475a7-f113-4c92-bbd0-33af8c581176","Type":"ContainerDied","Data":"e280ca658df7c4cc3a3ca4c5928e9dbca220276704398ec8a3ad3ec34bc04a31"} Dec 03 07:06:45 crc kubenswrapper[4810]: I1203 07:06:45.528973 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qs9l" event={"ID":"e78475a7-f113-4c92-bbd0-33af8c581176","Type":"ContainerStarted","Data":"e6e3606daf77d0af89910ca911b0fc0ecee6c6840873c48db5f5721d195dec25"} Dec 03 07:06:45 crc kubenswrapper[4810]: I1203 07:06:45.554571 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5qs9l" podStartSLOduration=2.995711625 podStartE2EDuration="5.554552437s" podCreationTimestamp="2025-12-03 07:06:40 +0000 UTC" firstStartedPulling="2025-12-03 07:06:42.494694208 +0000 UTC m=+5126.430155089" lastFinishedPulling="2025-12-03 07:06:45.05353506 +0000 UTC m=+5128.988995901" observedRunningTime="2025-12-03 07:06:45.546871695 +0000 UTC m=+5129.482332576" 
watchObservedRunningTime="2025-12-03 07:06:45.554552437 +0000 UTC m=+5129.490013288"